diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 8226039f..0adb62b6 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -1,16 +1,14 @@ -name: "Build and push GARM images" +name: "Build GARM images" on: - workflow_call: + workflow_dispatch: inputs: push_to_project: description: "Project to build images for" - required: false - type: string + required: true default: "ghcr.io/cloudbase" ref: description: "Ref to build" - required: false - type: string + required: true default: "main" permissions: @@ -24,7 +22,7 @@ jobs: runs-on: ubuntu-latest steps: - name: "Checkout" - uses: actions/checkout@v4 + uses: actions/checkout@v3 with: path: src/github.com/cloudbase/garm fetch-depth: 0 @@ -39,23 +37,16 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Build and push image - env: - IMAGE_REGISTRY: ${{ inputs.push_to_project }} - GH_REF: ${{ inputs.ref }} - working-directory: src/github.com/cloudbase/garm + - name: Build and push run: | - if [ "$GH_REF" == "main" ]; then - IMAGE_TAG="nightly" - else - IMAGE_TAG=$(git describe --tags --match='v[0-9]*' --always ${GH_REF}) - fi + cd src/github.com/cloudbase/garm + VERSION=$(git describe --tags --match='v[0-9]*' --always ${{ github.event.inputs.ref }}) docker buildx build \ --provenance=false \ --platform linux/amd64,linux/arm64 \ - --label "org.opencontainers.image.source=https://github.com/cloudbase/garm/tree/${GH_REF}" \ - --label "org.opencontainers.image.description=GARM ${GH_REF}" \ + --label "org.opencontainers.image.source=https://github.com/cloudbase/garm/tree/${{ github.event.inputs.ref }}" \ + --label "org.opencontainers.image.description=GARM ${{ github.event.inputs.ref }}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ - --build-arg="GARM_REF=${GH_REF}" \ - -t ${IMAGE_REGISTRY}/garm:"${IMAGE_TAG}" \ + --build-arg="GARM_REF=${{ github.event.inputs.ref }}" \ + -t ${{ github.event.inputs.push_to_project }}/garm:"${VERSION}" \ --push . 
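Context for the hunk above: the removed step passed the workflow inputs to the shell through `env:` (the `GH_REF` / `IMAGE_REGISTRY` variables visible in the `-` lines) rather than interpolating `${{ }}` directly into the `run:` script — the indirection GitHub's security-hardening guidance recommends, since it keeps untrusted input values out of the shell source text. A condensed, illustrative sketch of that pattern (not the full removed step):

```yaml
- name: Build and push image
  env:
    GH_REF: ${{ inputs.ref }}   # expanded by the runner, never re-parsed by the shell
  run: |
    # quoting keeps the value inert even if it contains shell metacharacters
    git describe --tags --match='v[0-9]*' --always "$GH_REF"
```

The `workflow_dispatch` version restored above interpolates `${{ github.event.inputs.ref }}` straight into the script body, so the input becomes part of the shell text itself.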
diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml index bde4f0f0..36d113d0 100644 --- a/.github/workflows/go-tests.yml +++ b/.github/workflows/go-tests.yml @@ -28,11 +28,10 @@ jobs: sudo apt-get update sudo apt-get install -y libbtrfs-dev build-essential apg jq - - uses: actions/checkout@v3 - uses: actions/setup-go@v5 with: - go-version-file: go.mod - + go-version: '^1.22.3' + - uses: actions/checkout@v3 - name: make lint run: make golangci-lint && GOLANGCI_LINT_EXTRA_ARGS="--timeout=8m --build-tags=testing,integration" make lint - name: Verify go vendor, go modules and gofmt @@ -44,39 +43,15 @@ jobs: runs-on: ubuntu-latest needs: [linters] steps: - - name: Install dependencies - run: | - sudo apt-get update - sudo apt-get install -y libbtrfs-dev build-essential apg jq default-jre - - - uses: actions/setup-node@v4 - with: - node-version: '>=v24.5.0' - - - name: Set up openapi-generator-cli - run: | - mkdir -p $HOME/openapi-generator - cd $HOME/openapi-generator - npm install @openapitools/openapi-generator-cli - echo "$HOME/openapi-generator/node_modules/.bin" >> $GITHUB_PATH - - name: Checkout uses: actions/checkout@v3 - name: Setup Golang - uses: actions/setup-go@v5 + uses: actions/setup-go@v3 with: go-version-file: go.mod - run: go version - - name: Run go generate - run: | - GOTOOLCHAIN=go1.24.6 make generate - - name: Run GARM Go Tests run: make go-test - - - name: Run web UI tests - run: | - make webui-test diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 04072b20..dc1a68b8 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -107,6 +107,7 @@ jobs: with: name: garm-logs path: /artifacts-logs + merge-multiple: true - name: Cleanup orphan GARM resources via GitHub API if: always() diff --git a/.github/workflows/trigger-manual.yml b/.github/workflows/trigger-manual.yml deleted file mode 100644 index faf166d4..00000000 --- a/.github/workflows/trigger-manual.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Manual build of GARM images -on: - workflow_dispatch: - inputs: - push_to_project: - description: "Project to build images for" - required: true - default: "ghcr.io/cloudbase" - ref: - description: "Ref to build" - required: true - default: "main" - -jobs: - call-build-and-push: - uses: ./.github/workflows/build-and-push.yml - with: - push_to_project: ${{ inputs.push_to_project }} - ref: ${{ inputs.ref }} \ No newline at end of file diff --git a/.github/workflows/trigger-nightly.yml b/.github/workflows/trigger-nightly.yml deleted file mode 100644 index e0b83856..00000000 --- a/.github/workflows/trigger-nightly.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: Nightly build of GARM images -on: - schedule: - - cron: "0 2 * * *" - -jobs: - call-build-and-push: - uses: ./.github/workflows/build-and-push.yml - with: - ref: "main" diff --git a/.gitignore b/.gitignore index 54c931c8..97f747e2 100644 --- a/.gitignore +++ b/.gitignore @@ -19,9 +19,3 @@ bin/ cmd/temp build/ release/ -node_modules/ -.svelte-kit/ -debug.html -git_push.sh -webapp/src/lib/api/generated/docs -.env diff --git a/.mockery.yaml b/.mockery.yaml deleted file mode 100644 index b7858821..00000000 --- a/.mockery.yaml +++ /dev/null @@ -1,27 +0,0 @@ -with-expecter: true -dir: "mocks" -mockname: "{{ .InterfaceName }}" -outpkg: "mocks" -filename: "{{ .InterfaceName }}.go" -# V3 compatibility settings -resolve-type-alias: false -disable-version-string: true -issue-845-fix: true -packages: - # Database store interfaces - 
github.com/cloudbase/garm/database/common: - interfaces: - Store: - config: - dir: "{{ .InterfaceDir }}/mocks" - # Runner interfaces - github.com/cloudbase/garm/runner: - interfaces: - PoolManagerController: - config: - dir: "{{ .InterfaceDir }}/mocks" - # Runner common interfaces (generate all interfaces in this package) - github.com/cloudbase/garm/runner/common: - config: - dir: "{{ .InterfaceDir }}/mocks" - all: true \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 81033292..66e78962 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,68 +1,52 @@ FROM docker.io/golang:alpine AS builder ARG GARM_REF +ARG AZURE_REF=v0.1.2 +ARG OPENSTACK_REF=v0.1.2 +ARG LXD_REF=v0.1.2 +ARG INCUS_REF=v0.1.2 +ARG AWS_REF=v0.1.3 +ARG GCP_REF=v0.1.2 +ARG EQUINIX_REF=v0.1.2 +ARG K8S_REF=v0.3.2 +ARG LINODE_REF=v0.2.0 LABEL stage=builder -RUN apk add --no-cache musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx curl jq -RUN git config --global --add safe.directory /build && git config --global --add advice.detachedHead false -RUN echo ${GARM_REF} +RUN apk add musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx +RUN git config --global --add safe.directory /build ADD . /build/garm +RUN cd /build/garm && git checkout ${GARM_REF} +RUN git clone --depth 1 --branch ${AZURE_REF} https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure +RUN git clone --depth 1 --branch ${OPENSTACK_REF} https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack +RUN git clone --depth 1 --branch ${LXD_REF} https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd +RUN git clone --depth 1 --branch ${INCUS_REF} https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus +RUN git clone --depth 1 --branch ${AWS_REF} https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws +RUN git clone --depth 1 --branch ${GCP_REF} https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp +RUN git clone --depth 1 --branch ${EQUINIX_REF} https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix +RUN git clone --depth 1 --branch ${LINODE_REF} https://github.com/flatcar/garm-provider-linode /build/garm-provider-linode -RUN git -C /build/garm checkout ${GARM_REF} -RUN cd /build/garm \ - && go build -o /bin/garm \ - -tags osusergo,netgo,sqlite_omit_load_extension \ - -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ - /build/garm/cmd/garm && upx /bin/garm -RUN cd /build/garm/cmd/garm-cli \ - && go build -o /bin/garm-cli \ - -tags osusergo,netgo,sqlite_omit_load_extension \ - -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ - . 
&& upx /bin/garm-cli -RUN set -ex; \ - mkdir -p /opt/garm/providers.d; \ - for repo in \ - cloudbase/garm-provider-azure \ - cloudbase/garm-provider-openstack \ - cloudbase/garm-provider-lxd \ - cloudbase/garm-provider-incus \ - cloudbase/garm-provider-aws \ - cloudbase/garm-provider-gcp \ - cloudbase/garm-provider-equinix \ - flatcar/garm-provider-linode \ - mercedes-benz/garm-provider-k8s; \ - do \ - export PROVIDER_NAME="$(basename $repo)"; \ - export PROVIDER_SUBDIR=""; \ - if [ "$GARM_REF" == "main" ]; then \ - export PROVIDER_REF="main"; \ - else \ - export PROVIDER_REF="$(curl -s -L https://api.github.com/repos/$repo/releases/latest | jq -r '.tag_name')"; \ - fi; \ - git clone --branch "$PROVIDER_REF" "https://github.com/$repo" "/build/$PROVIDER_NAME"; \ - case $PROVIDER_NAME in \ - "garm-provider-k8s") \ - export PROVIDER_SUBDIR="cmd/garm-provider-k8s"; \ - export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ - ;; \ - "garm-provider-linode") \ - export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ - ;; \ - *) \ - export PROVIDER_VERSION=$(git -C /build/$PROVIDER_NAME describe --tags --match='v[0-9]*' --dirty --always); \ - export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$PROVIDER_VERSION"; \ - ;; \ - esac; \ - cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ - && go build -ldflags="$PROVIDER_LDFLAGS" -o /opt/garm/providers.d/$PROVIDER_NAME . \ - && upx /opt/garm/providers.d/$PROVIDER_NAME; \ - done +RUN git clone --depth 1 --branch v0.3.1 https://github.com/mercedes-benz/garm-provider-k8s /build/garm-provider-k8s + +RUN cd /build/garm && go build -o /bin/garm \ + -tags osusergo,netgo,sqlite_omit_load_extension \ + -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ + /build/garm/cmd/garm && upx /bin/garm +RUN mkdir -p /opt/garm/providers.d +RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${AZURE_REF}" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure +RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${OPENSTACK_REF}" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack +RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${LXD_REF}" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd +RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${INCUS_REF}" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus +RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${AWS_REF}" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws +RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${GCP_REF}" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp +RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${EQUINIX_REF}" -o /opt/garm/providers.d/garm-provider-equinix . 
&& upx /opt/garm/providers.d/garm-provider-equinix +RUN cd /build/garm-provider-linode && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-linode . && upx /opt/garm/providers.d/garm-provider-linode + +RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-k8s . && upx /opt/garm/providers.d/garm-provider-k8s FROM busybox COPY --from=builder /bin/garm /bin/garm -COPY --from=builder /bin/garm-cli /bin/garm-cli COPY --from=builder /opt/garm/providers.d/garm-provider-openstack /opt/garm/providers.d/garm-provider-openstack COPY --from=builder /opt/garm/providers.d/garm-provider-lxd /opt/garm/providers.d/garm-provider-lxd COPY --from=builder /opt/garm/providers.d/garm-provider-incus /opt/garm/providers.d/garm-provider-incus @@ -70,6 +54,7 @@ COPY --from=builder /opt/garm/providers.d/garm-provider-azure /opt/garm/provider COPY --from=builder /opt/garm/providers.d/garm-provider-aws /opt/garm/providers.d/garm-provider-aws COPY --from=builder /opt/garm/providers.d/garm-provider-gcp /opt/garm/providers.d/garm-provider-gcp COPY --from=builder /opt/garm/providers.d/garm-provider-equinix /opt/garm/providers.d/garm-provider-equinix +COPY --from=builder /opt/garm/providers.d/garm-provider-linode /opt/garm/providers.d/garm-provider-linode COPY --from=builder /opt/garm/providers.d/garm-provider-k8s /opt/garm/providers.d/garm-provider-k8s COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ diff --git a/Makefile b/Makefile index 714d2465..f5337783 100644 --- a/Makefile +++ b/Makefile @@ -6,10 +6,8 @@ export SHELLOPTS:=$(if $(SHELLOPTS),$(SHELLOPTS):)pipefail:errexit GEN_PASSWORD=$(shell (/usr/bin/apg -n1 -m32)) IMAGE_TAG = garm-build -IMAGE_BUILDER=$(shell (which docker || which podman)) -IS_PODMAN=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "yes" || echo "no")) -USER_ID=$(if $(filter yes,$(IS_PODMAN)),0,$(shell id -u)) -USER_GROUP=$(if $(filter yes,$(IS_PODMAN)),0,$(shell id -g)) +USER_ID=$(shell ((docker --version | grep -q podman) && echo "0" || id -u)) +USER_GROUP=$(shell ((docker --version | grep -q podman) && echo "0" || id -g)) ROOTDIR=$(dir $(abspath $(lastword $(MAKEFILE_LIST)))) GOPATH ?= $(shell go env GOPATH) VERSION ?= $(shell git describe --tags --match='v[0-9]*' --dirty --always) @@ -22,11 +20,6 @@ export CREDENTIALS_NAME ?= test-garm-creds export WORKFLOW_FILE_NAME ?= test.yml export GARM_ADMIN_USERNAME ?= admin -ifeq ($(IS_PODMAN),yes) - EXTRA_ARGS := -v /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt -endif - - .PHONY: help help: ## Display this help. @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-20s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) @@ -39,9 +32,9 @@ default: build .PHONY : build-static test install-lint-deps lint go-test fmt fmtcheck verify-vendor verify create-release-files release build-static: ## Build garm statically @echo Building garm - $(IMAGE_BUILDER) build $(EXTRA_ARGS) --tag $(IMAGE_TAG) -f Dockerfile.build-static . + docker build --tag $(IMAGE_TAG) -f Dockerfile.build-static . 
mkdir -p build - $(IMAGE_BUILDER) run --rm -e USER_ID=$(USER_ID) -e GARM_REF=$(GARM_REF) -e USER_GROUP=$(USER_GROUP) -v $(PWD)/build:/build/output:z $(IMAGE_TAG) /build-static.sh + docker run --rm -e USER_ID=$(USER_ID) -e GARM_REF=$(GARM_REF) -e USER_GROUP=$(USER_GROUP) -v $(PWD)/build:/build/output:z $(IMAGE_TAG) /build-static.sh @echo Binaries are available in $(PWD)/build clean: ## Clean up build artifacts @@ -55,20 +48,6 @@ build: ## Build garm @$(GO) build -ldflags "-s -w -X github.com/cloudbase/garm/util/appdefaults.Version=${VERSION}" -tags osusergo,netgo,sqlite_omit_load_extension -o bin/garm-cli ./cmd/garm-cli @echo Binaries are available in $(PWD)/bin -.PHONY: build-webui -build-webui: - @echo Building GARM web ui - ./build-webapp.sh - rm -rf webapp/assets/_app - cp -r webapp/build/* webapp/assets/ - -.PHONY: generate -generate: ## Run go generate after checking required tools are in PATH - @echo Checking required tools... - @which openapi-generator-cli > /dev/null || (echo "Error: openapi-generator-cli not found in PATH" && exit 1) - @echo Running go generate - @$(GO) generate ./... - test: verify go-test ## Run tests ##@ Release @@ -115,9 +94,6 @@ go-test: ## Run tests fmt: ## Run go fmt against code. @$(GO) fmt $$(go list ./...) -webui-test: - (cd webapp && npm install) - (cd webapp && npm run test:run) ##@ Build Dependencies diff --git a/README.md b/README.md index 24fbbcc4..390dc15b 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,5 @@ - -

- [README header logo: "Light mode image" / "Dark mode image"]

- # GitHub Actions Runner Manager (GARM) -[![Go Tests](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml/badge.svg)](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml) - - [GitHub Actions Runner Manager GARM](#github-actions-runner-manager-garm) @@ -16,9 +8,6 @@ - [Installing](#installing) - [Quickstart](#quickstart) - [Installing on Kubernetes](#installing-on-kubernetes) - - [Configuring GARM for GHES](#configuring-garm-for-ghes) - - [Configuring GARM for Gitea](#configuring-garm-for-gitea) - - [Enabling the web UI](#enabling-the-web-ui) - [Using GARM](#using-garm) - [Supported providers](#supported-providers) - [Installing external providers](#installing-external-providers) @@ -31,29 +20,20 @@ Welcome to GARM! -GARM enables you to create and automatically maintain pools of self-hosted runners in both [Github](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners) and [Gitea](https://github.com/go-gitea/gitea/) with auto-scaling that can be used inside your workflow runs. +GARM enables you to create and automatically maintain pools of [self-hosted GitHub runners](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners), with auto-scaling that can be used inside your GitHub workflow runs. -The goal of ```GARM``` is to be simple to set up, simple to configure and simple to use. The server itself is a single binary that can run on any GNU/Linux machine without any requirements other than the providers you want to enable in your setup. It is intended to be easy to deploy in any environment and can create runners in virtually any system you can write a provider for (if one does not already exist). There is no complicated setup process and no extremely complex concepts to understand. Once set up, it's meant to stay out of your way. +The goal of ```GARM``` is to be simple to set up, simple to configure and simple to use. The server itself is a single binary that can run on any GNU/Linux machine without any requirements other than the providers you want to enable in your setup. It is intended to be easy to deploy in any environment and can create runners in virtually any system you can write a provider for. There is no complicated setup process and no extremely complex concepts to understand. Once set up, it's meant to stay out of your way. -Through the use of providers, `GARM` can create runners in a variety of environments using the same `GARM` instance. Whether you want to create runners in your OpenStack cloud, your Azure cloud or your Kubernetes cluster, that is easily achieved by installing the appropriate providers, configuring them in `GARM` and creating pools that use them. You can create zero-runner pools for instances with high costs (large VMs, GPU enabled instances, etc) and have them spin up on demand, or you can create large pools of eagerly created k8s backed runners that can be used for your CI/CD pipelines at a moment's notice. You can mix them up and create pools in any combination of providers or resource allocations you want. +GARM supports creating pools in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation.
-GARM supports two modes of operation: +Through the use of providers, `GARM` can create runners in a variety of environments using the same `GARM` instance. Whether you want to create pools of runners in your OpenStack cloud, your Azure cloud or your Kubernetes cluster, that is easily achieved by just installing the appropriate providers, configuring them in `GARM` and creating pools that use them. You can create zero-runner pools for instances with high costs (large VMs, GPU enabled instances, etc) and have them spin up on demand, or you can create large pools of eagerly created k8s backed runners that can be used for your CI/CD pipelines at a moment's notice. You can mix them up and create pools in any combination of providers or resource allocations you want. -Here is a brief architectural diagram of how pools work and how GARM reacts to workflows triggered in GitHub (click the image to see a larger version): +Here is a brief architectural diagram of how GARM reacts to workflows triggered in GitHub (click the image to see a larger version): +![GARM architecture diagram](/doc/images/garm-light.drawio.svg?raw=true#gh-light-mode-only) +![GARM architecture diagram](/doc/images/garm-dark.drawio.svg?raw=true#gh-dark-mode-only) -![GARM architecture diagram](/doc/images/garm-light.diagram.svg?raw=true#gh-light-mode-only) -![GARM architecture diagram](/doc/images/garm-dark.diagram.svg?raw=true#gh-dark-mode-only) - -**Scale sets** work differently. While pools (as they are defined in GARM) rely on webhooks to know when a job was started and GARM needs to internally make the right decision about which pool should handle that runner, scale sets have a lot of the scheduling and decision-making logic done in GitHub itself. - -> [!IMPORTANT] -> The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.6`, please refer to the [v0.1.6 tag](https://github.com/cloudbase/garm/tree/v0.1.6). - -> [!CAUTION] -> The `main` branch holds the latest code and is not guaranteed to be stable. If you are looking for a stable release, please check the releases page. If you plan to use the `main` branch, please do so on a new instance. Do not upgrade from a stable release to `main`. +:warning: **Important note**: The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.6`, please refer to the [v0.1.6 tag](https://github.com/cloudbase/garm/tree/v0.1.6). ## Join us on slack @@ -71,25 +51,6 @@ Check out the [quickstart](/doc/quickstart.md) document for instructions on how Thanks to the efforts of the amazing folks at [@mercedes-benz](https://github.com/mercedes-benz/), GARM can now be integrated into k8s via their operator. Check out the [GARM operator](https://github.com/mercedes-benz/garm-operator/) for more details.
-## Configuring GARM for GHES - -GARM supports creating pools and scale sets in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation. - -## Configuring GARM for Gitea - -GARM now has support for Gitea (>=1.24.0). For information on getting started with Gitea, see the [Gitea quickstart](/doc/gitea.md) document. - -## Enabling the web UI - -GARM now ships with a single page application. To enable it, add the following to your GARM config: - -```toml -[apiserver.webui] - enable = true -``` - -Check the [README.md](/webapp/README.md) file for details on the web UI. - ## Using GARM GARM is designed with simplicity in mind. At least we try to keep it as simple as possible. We're aware that adding a new tool in your workflow can be painful, especially when you already have to deal with so many. The cognitive load for OPS has reached a level where it feels overwhelming at times to even wrap your head around a new tool. As such, we believe that tools should be simple, should take no more than a few hours to understand and set up and if you absolutely need to interact with the tool, it should be as intuitive as possible. Although we try our best to make this happen, we're aware that GARM has some rough edges, especially for new users. If you encounter issues or feel like the setup process was too complicated, please let us know. We're always looking to improve the user experience. @@ -108,15 +69,14 @@ GARM uses providers to create runners in a particular IaaS. The providers are ex External providers are binaries that GARM calls into to create runners in a particular IaaS. There are several external providers available: -* [Akamai/Linode](https://github.com/flatcar/garm-provider-linode) - Experimental -* [Amazon EC2](https://github.com/cloudbase/garm-provider-aws) +* [OpenStack](https://github.com/cloudbase/garm-provider-openstack) * [Azure](https://github.com/cloudbase/garm-provider-azure) -* [Equinix Metal](https://github.com/cloudbase/garm-provider-equinix) -* [Google Cloud Platform (GCP)](https://github.com/cloudbase/garm-provider-gcp) -* [Incus](https://github.com/cloudbase/garm-provider-incus) * [Kubernetes](https://github.com/mercedes-benz/garm-provider-k8s) - Thanks to the amazing folks at @mercedes-benz for sharing their awesome provider! * [LXD](https://github.com/cloudbase/garm-provider-lxd) -* [OpenStack](https://github.com/cloudbase/garm-provider-openstack) +* [Incus](https://github.com/cloudbase/garm-provider-incus) +* [Equinix Metal](https://github.com/cloudbase/garm-provider-equinix) +* [Amazon EC2](https://github.com/cloudbase/garm-provider-aws) +* [Google Cloud Platform (GCP)](https://github.com/cloudbase/garm-provider-gcp) * [Oracle Cloud Infrastructure (OCI)](https://github.com/cloudbase/garm-provider-oci) Follow the instructions in the README of each provider to install them. 
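The `handleError` hunk in the controllers.go diff that follows swaps error-matching styles: the removed code matched wrapped sentinel errors with the standard library's `errors.Is`, while the restored code unwraps with `github.com/pkg/errors` and type-switches on the cause. A minimal, self-contained sketch contrasting the two styles — the local `ErrNotFound`/`NotFoundError` stand-ins only mirror names from garm's `gErrors` package and are not the real types:

```go
package main

import (
	"errors"
	"fmt"

	pkgerrors "github.com/pkg/errors"
)

// Stand-ins for gErrors.ErrNotFound / gErrors.NotFoundError (illustrative only).
var ErrNotFound = errors.New("not found")

type NotFoundError struct{}

func (e *NotFoundError) Error() string { return "not found" }

func main() {
	// Removed style: wrap with %w, match the sentinel via errors.Is.
	err := fmt.Errorf("fetching pool: %w", ErrNotFound)
	fmt.Println(errors.Is(err, ErrNotFound)) // true

	// Restored style: wrap with pkg/errors, type-switch on the root cause.
	err = pkgerrors.Wrap(&NotFoundError{}, "fetching pool")
	switch pkgerrors.Cause(err).(type) {
	case *NotFoundError:
		fmt.Println("would map to HTTP 404")
	}
}
```

Both styles end up mapping the matched error to an HTTP status in `handleError`; the difference is only in how the wrapped error is inspected.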
diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index 019671eb..d8750a50 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -17,57 +17,30 @@ package controllers import ( "context" "encoding/json" - "errors" - "fmt" "io" "log/slog" "net/http" - "net/url" "strings" "github.com/gorilla/mux" "github.com/gorilla/websocket" + "github.com/pkg/errors" gErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm-provider-common/util" + "github.com/cloudbase/garm/apiserver/events" "github.com/cloudbase/garm/apiserver/params" "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/config" "github.com/cloudbase/garm/metrics" runnerParams "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck - garmUtil "github.com/cloudbase/garm/util" wsWriter "github.com/cloudbase/garm/websocket" - "github.com/cloudbase/garm/workers/websocket/events" ) -func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub *wsWriter.Hub, apiCfg config.APIServer) (*APIController, error) { +func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub *wsWriter.Hub) (*APIController, error) { controllerInfo, err := r.GetControllerInfo(auth.GetAdminContext(context.Background())) if err != nil { - return nil, fmt.Errorf("failed to get controller info: %w", err) - } - var checkOrigin func(r *http.Request) bool - if len(apiCfg.CORSOrigins) > 0 { - checkOrigin = func(r *http.Request) bool { - origin := r.Header["Origin"] - if len(origin) == 0 { - return true - } - u, err := url.Parse(origin[0]) - if err != nil { - return false - } - for _, val := range apiCfg.CORSOrigins { - corsVal, err := url.Parse(val) - if err != nil { - continue - } - if garmUtil.ASCIIEqualFold(u.Host, corsVal.Host) { - return true - } - } - return false - } + return nil, errors.Wrap(err, "failed to get controller info") } return &APIController{ r: r, @@ -76,7 +49,6 @@ func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub * upgrader: websocket.Upgrader{ ReadBufferSize: 1024, WriteBufferSize: 16384, - CheckOrigin: checkOrigin, }, controllerID: controllerInfo.ControllerID.String(), }, nil @@ -92,22 +64,24 @@ type APIController struct { func handleError(ctx context.Context, w http.ResponseWriter, err error) { w.Header().Set("Content-Type", "application/json") + origErr := errors.Cause(err) apiErr := params.APIErrorResponse{ - Details: err.Error(), + Details: origErr.Error(), } - switch { - case errors.Is(err, gErrors.ErrNotFound): + + switch origErr.(type) { + case *gErrors.NotFoundError: w.WriteHeader(http.StatusNotFound) apiErr.Error = "Not Found" - case errors.Is(err, gErrors.ErrUnauthorized): + case *gErrors.UnauthorizedError: w.WriteHeader(http.StatusUnauthorized) apiErr.Error = "Not Authorized" // Don't include details on 401 errors. 
apiErr.Details = "" - case errors.Is(err, gErrors.ErrBadRequest): + case *gErrors.BadRequestError: w.WriteHeader(http.StatusBadRequest) apiErr.Error = "Bad Request" - case errors.Is(err, gErrors.ErrDuplicateEntity), errors.Is(err, &gErrors.ConflictError{}): + case *gErrors.DuplicateUserError, *gErrors.ConflictError: w.WriteHeader(http.StatusConflict) apiErr.Error = "Conflict" default: @@ -132,15 +106,8 @@ func (a *APIController) handleWorkflowJobEvent(ctx context.Context, w http.Respo signature := r.Header.Get("X-Hub-Signature-256") hookType := r.Header.Get("X-Github-Hook-Installation-Target-Type") - giteaTargetType := r.Header.Get("X-Gitea-Hook-Installation-Target-Type") - forgeType := runnerParams.GithubEndpointType - if giteaTargetType != "" { - forgeType = runnerParams.GiteaEndpointType - hookType = giteaTargetType - } - - if err := a.r.DispatchWorkflowJob(hookType, signature, forgeType, body); err != nil { + if err := a.r.DispatchWorkflowJob(hookType, signature, body); err != nil { switch { case errors.Is(err, gErrors.ErrNotFound): metrics.WebhooksReceived.WithLabelValues( diff --git a/apiserver/controllers/github_credentials.go b/apiserver/controllers/credentials.go similarity index 90% rename from apiserver/controllers/github_credentials.go rename to apiserver/controllers/credentials.go index 04e087e5..70869b54 100644 --- a/apiserver/controllers/github_credentials.go +++ b/apiserver/controllers/credentials.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
package controllers import ( @@ -60,7 +47,7 @@ func (a *APIController) ListCredentials(w http.ResponseWriter, r *http.Request) // required: true // // Responses: -// 200: ForgeCredentials +// 200: GithubCredentials // 400: APIErrorResponse func (a *APIController) CreateGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -96,7 +83,7 @@ func (a *APIController) CreateGithubCredential(w http.ResponseWriter, r *http.Re // required: true // // Responses: -// 200: ForgeCredentials +// 200: GithubCredentials // 400: APIErrorResponse func (a *APIController) GetGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -196,7 +183,7 @@ func (a *APIController) DeleteGithubCredential(w http.ResponseWriter, r *http.Re // required: true // // Responses: -// 200: ForgeCredentials +// 200: GithubCredentials // 400: APIErrorResponse func (a *APIController) UpdateGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/apiserver/controllers/github_endpoints.go b/apiserver/controllers/endpoints.go similarity index 88% rename from apiserver/controllers/github_endpoints.go rename to apiserver/controllers/endpoints.go index 482f9d03..81e984d4 100644 --- a/apiserver/controllers/github_endpoints.go +++ b/apiserver/controllers/endpoints.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package controllers import ( @@ -36,7 +23,7 @@ import ( // required: true // // Responses: -// 200: ForgeEndpoint +// 200: GithubEndpoint // default: APIErrorResponse func (a *APIController) CreateGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -65,7 +52,7 @@ func (a *APIController) CreateGithubEndpoint(w http.ResponseWriter, r *http.Requ // List all GitHub Endpoints. // // Responses: -// 200: ForgeEndpoints +// 200: GithubEndpoints // default: APIErrorResponse func (a *APIController) ListGithubEndpoints(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -94,7 +81,7 @@ func (a *APIController) ListGithubEndpoints(w http.ResponseWriter, r *http.Reque // required: true // // Responses: -// 200: ForgeEndpoint +// 200: GithubEndpoint // default: APIErrorResponse func (a *APIController) GetGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -166,7 +153,7 @@ func (a *APIController) DeleteGithubEndpoint(w http.ResponseWriter, r *http.Requ // required: true // // Responses: -// 200: ForgeEndpoint +// 200: GithubEndpoint // default: APIErrorResponse func (a *APIController) UpdateGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/apiserver/controllers/enterprises.go b/apiserver/controllers/enterprises.go index b4b3e528..d4b20826 100644 --- a/apiserver/controllers/enterprises.go +++ b/apiserver/controllers/enterprises.go @@ -66,30 +66,13 @@ func (a *APIController) CreateEnterpriseHandler(w http.ResponseWriter, r *http.R // // List all enterprises. 
// -// Parameters: -// + name: name -// description: Exact enterprise name to filter by -// type: string -// in: query -// required: false -// -// + name: endpoint -// description: Exact endpoint name to filter by -// type: string -// in: query -// required: false -// // Responses: // 200: Enterprises // default: APIErrorResponse func (a *APIController) ListEnterprisesHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - filter := runnerParams.EnterpriseFilter{ - Name: r.URL.Query().Get("name"), - Endpoint: r.URL.Query().Get("endpoint"), - } - enterprise, err := a.r.ListEnterprises(ctx, filter) + enterprise, err := a.r.ListEnterprises(ctx) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing enterprise") handleError(ctx, w, err) @@ -294,62 +277,6 @@ func (a *APIController) CreateEnterprisePoolHandler(w http.ResponseWriter, r *ht } } -// swagger:route POST /enterprises/{enterpriseID}/scalesets enterprises scalesets CreateEnterpriseScaleSet -// -// Create enterprise pool with the parameters given. -// -// Parameters: -// + name: enterpriseID -// description: Enterprise ID. -// type: string -// in: path -// required: true -// -// + name: Body -// description: Parameters used when creating the enterprise scale set. -// type: CreateScaleSetParams -// in: body -// required: true -// -// Responses: -// 200: ScaleSet -// default: APIErrorResponse -func (a *APIController) CreateEnterpriseScaleSetHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - enterpriseID, ok := vars["enterpriseID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No enterprise ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - var scaleSetData runnerParams.CreateScaleSetParams - if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeEnterprise, enterpriseID, scaleSetData) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating enterprise scale set") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSet); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /enterprises/{enterpriseID}/pools enterprises pools ListEnterprisePools // // List enterprise pools. @@ -392,48 +319,6 @@ func (a *APIController) ListEnterprisePoolsHandler(w http.ResponseWriter, r *htt } } -// swagger:route GET /enterprises/{enterpriseID}/scalesets enterprises scalesets ListEnterpriseScaleSets -// -// List enterprise scale sets. -// -// Parameters: -// + name: enterpriseID -// description: Enterprise ID. 
-// type: string -// in: path -// required: true -// -// Responses: -// 200: ScaleSets -// default: APIErrorResponse -func (a *APIController) ListEnterpriseScaleSetsHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - enterpriseID, ok := vars["enterpriseID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No enterprise ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeEnterprise, enterpriseID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSets); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /enterprises/{enterpriseID}/pools/{poolID} enterprises pools GetEnterprisePool // // Get enterprise pool by ID. diff --git a/apiserver/controllers/gitea_credentials.go b/apiserver/controllers/gitea_credentials.go deleted file mode 100644 index 777be982..00000000 --- a/apiserver/controllers/gitea_credentials.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. -package controllers - -import ( - "encoding/json" - "log/slog" - "math" - "net/http" - "strconv" - - "github.com/gorilla/mux" - - gErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -// swagger:route GET /gitea/credentials credentials ListGiteaCredentials -// -// List all credentials. -// -// Responses: -// 200: Credentials -// 400: APIErrorResponse -func (a *APIController) ListGiteaCredentials(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - creds, err := a.r.ListGiteaCredentials(ctx) - if err != nil { - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(creds); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route POST /gitea/credentials credentials CreateGiteaCredentials -// -// Create a Gitea credential. -// -// Parameters: -// + name: Body -// description: Parameters used when creating a Gitea credential. 
-// type: CreateGiteaCredentialsParams -// in: body -// required: true -// -// Responses: -// 200: ForgeCredentials -// 400: APIErrorResponse -func (a *APIController) CreateGiteaCredential(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - var params params.CreateGiteaCredentialsParams - if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - cred, err := a.r.CreateGiteaCredentials(ctx, params) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to create Gitea credential") - handleError(ctx, w, err) - return - } - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(cred); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route GET /gitea/credentials/{id} credentials GetGiteaCredentials -// -// Get a Gitea credential. -// -// Parameters: -// + name: id -// description: ID of the Gitea credential. -// type: integer -// in: path -// required: true -// -// Responses: -// 200: ForgeCredentials -// 400: APIErrorResponse -func (a *APIController) GetGiteaCredential(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - idParam, ok := vars["id"] - if !ok { - slog.ErrorContext(ctx, "missing id in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - id, err := strconv.ParseUint(idParam, 10, 64) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - if id > math.MaxUint { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - cred, err := a.r.GetGiteaCredentials(ctx, uint(id)) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to get Gitea credential") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(cred); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route DELETE /gitea/credentials/{id} credentials DeleteGiteaCredentials -// -// Delete a Gitea credential. -// -// Parameters: -// + name: id -// description: ID of the Gitea credential. 
-// type: integer -// in: path -// required: true -// -// Responses: -// default: APIErrorResponse -func (a *APIController) DeleteGiteaCredential(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - idParam, ok := vars["id"] - if !ok { - slog.ErrorContext(ctx, "missing id in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - id, err := strconv.ParseUint(idParam, 10, 64) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - if id > math.MaxUint { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - if err := a.r.DeleteGiteaCredentials(ctx, uint(id)); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete Gitea credential") - handleError(ctx, w, err) - return - } - - w.WriteHeader(http.StatusNoContent) -} - -// swagger:route PUT /gitea/credentials/{id} credentials UpdateGiteaCredentials -// -// Update a Gitea credential. -// -// Parameters: -// + name: id -// description: ID of the Gitea credential. -// type: integer -// in: path -// required: true -// + name: Body -// description: Parameters used when updating a Gitea credential. -// type: UpdateGiteaCredentialsParams -// in: body -// required: true -// -// Responses: -// 200: ForgeCredentials -// 400: APIErrorResponse -func (a *APIController) UpdateGiteaCredential(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - idParam, ok := vars["id"] - if !ok { - slog.ErrorContext(ctx, "missing id in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - id, err := strconv.ParseUint(idParam, 10, 64) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - if id > math.MaxUint { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - var params params.UpdateGiteaCredentialsParams - if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - cred, err := a.r.UpdateGiteaCredentials(ctx, uint(id), params) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to update Gitea credential") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(cred); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} diff --git a/apiserver/controllers/gitea_endpoints.go b/apiserver/controllers/gitea_endpoints.go deleted file mode 100644 index 67e85178..00000000 --- a/apiserver/controllers/gitea_endpoints.go +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -// License for the specific language governing permissions and limitations -// under the License. -package controllers - -import ( - "encoding/json" - "log/slog" - "net/http" - - "github.com/gorilla/mux" - - gErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -// swagger:route POST /gitea/endpoints endpoints CreateGiteaEndpoint -// -// Create a Gitea Endpoint. -// -// Parameters: -// + name: Body -// description: Parameters used when creating a Gitea endpoint. -// type: CreateGiteaEndpointParams -// in: body -// required: true -// -// Responses: -// 200: ForgeEndpoint -// default: APIErrorResponse -func (a *APIController) CreateGiteaEndpoint(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - var params params.CreateGiteaEndpointParams - if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - endpoint, err := a.r.CreateGiteaEndpoint(ctx, params) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to create Gitea endpoint") - handleError(ctx, w, err) - return - } - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(endpoint); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route GET /gitea/endpoints endpoints ListGiteaEndpoints -// -// List all Gitea Endpoints. -// -// Responses: -// 200: ForgeEndpoints -// default: APIErrorResponse -func (a *APIController) ListGiteaEndpoints(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - endpoints, err := a.r.ListGiteaEndpoints(ctx) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to list Gitea endpoints") - handleError(ctx, w, err) - return - } - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(endpoints); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route GET /gitea/endpoints/{name} endpoints GetGiteaEndpoint -// -// Get a Gitea Endpoint. -// -// Parameters: -// + name: name -// description: The name of the Gitea endpoint. -// type: string -// in: path -// required: true -// -// Responses: -// 200: ForgeEndpoint -// default: APIErrorResponse -func (a *APIController) GetGiteaEndpoint(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - name, ok := vars["name"] - if !ok { - slog.ErrorContext(ctx, "missing name in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - endpoint, err := a.r.GetGiteaEndpoint(ctx, name) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to get Gitea endpoint") - handleError(ctx, w, err) - return - } - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(endpoint); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route DELETE /gitea/endpoints/{name} endpoints DeleteGiteaEndpoint -// -// Delete a Gitea Endpoint. -// -// Parameters: -// + name: name -// description: The name of the Gitea endpoint. 
-// type: string -// in: path -// required: true -// -// Responses: -// default: APIErrorResponse -func (a *APIController) DeleteGiteaEndpoint(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - name, ok := vars["name"] - if !ok { - slog.ErrorContext(ctx, "missing name in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - if err := a.r.DeleteGiteaEndpoint(ctx, name); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete Gitea endpoint") - handleError(ctx, w, err) - return - } - w.WriteHeader(http.StatusNoContent) -} - -// swagger:route PUT /gitea/endpoints/{name} endpoints UpdateGiteaEndpoint -// -// Update a Gitea Endpoint. -// -// Parameters: -// + name: name -// description: The name of the Gitea endpoint. -// type: string -// in: path -// required: true -// + name: Body -// description: Parameters used when updating a Gitea endpoint. -// type: UpdateGiteaEndpointParams -// in: body -// required: true -// -// Responses: -// 200: ForgeEndpoint -// default: APIErrorResponse -func (a *APIController) UpdateGiteaEndpoint(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - name, ok := vars["name"] - if !ok { - slog.ErrorContext(ctx, "missing name in request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - var params params.UpdateGiteaEndpointParams - if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - endpoint, err := a.r.UpdateGiteaEndpoint(ctx, name, params) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to update GitHub endpoint") - handleError(ctx, w, err) - return - } - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(endpoint); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} diff --git a/apiserver/controllers/instances.go b/apiserver/controllers/instances.go index 3209a5c2..962264f9 100644 --- a/apiserver/controllers/instances.go +++ b/apiserver/controllers/instances.go @@ -69,54 +69,6 @@ func (a *APIController) ListPoolInstancesHandler(w http.ResponseWriter, r *http. } } -// swagger:route GET /scalesets/{scalesetID}/instances instances ListScaleSetInstances -// -// List runner instances in a scale set. -// -// Parameters: -// + name: scalesetID -// description: Runner scale set ID. 
-// type: string -// in: path -// required: true -// -// Responses: -// 200: Instances -// default: APIErrorResponse -func (a *APIController) ListScaleSetInstancesHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - scalesetID, ok := vars["scalesetID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No pool ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - id, err := strconv.ParseUint(scalesetID, 10, 32) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - instances, err := a.r.ListScaleSetInstances(ctx, uint(id)) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing pool instances") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(instances); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /instances/{instanceName} instances GetInstance // // Get runner instance by name. diff --git a/apiserver/controllers/organizations.go b/apiserver/controllers/organizations.go index 9089f440..ca2ef3b5 100644 --- a/apiserver/controllers/organizations.go +++ b/apiserver/controllers/organizations.go @@ -67,30 +67,13 @@ func (a *APIController) CreateOrgHandler(w http.ResponseWriter, r *http.Request) // // List organizations. // -// Parameters: -// + name: name -// description: Exact organization name to filter by -// type: string -// in: query -// required: false -// -// + name: endpoint -// description: Exact endpoint name to filter by -// type: string -// in: query -// required: false -// // Responses: // 200: Organizations // default: APIErrorResponse func (a *APIController) ListOrgsHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - filter := runnerParams.OrganizationFilter{ - Name: r.URL.Query().Get("name"), - Endpoint: r.URL.Query().Get("endpoint"), - } - orgs, err := a.r.ListOrganizations(ctx, filter) + orgs, err := a.r.ListOrganizations(ctx) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing orgs") handleError(ctx, w, err) @@ -304,62 +287,6 @@ func (a *APIController) CreateOrgPoolHandler(w http.ResponseWriter, r *http.Requ } } -// swagger:route POST /organizations/{orgID}/scalesets organizations scalesets CreateOrgScaleSet -// -// Create organization scale set with the parameters given. -// -// Parameters: -// + name: orgID -// description: Organization ID. -// type: string -// in: path -// required: true -// -// + name: Body -// description: Parameters used when creating the organization scale set. 
-// type: CreateScaleSetParams -// in: body -// required: true -// -// Responses: -// 200: ScaleSet -// default: APIErrorResponse -func (a *APIController) CreateOrgScaleSetHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - orgID, ok := vars["orgID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No org ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - var scalesetData runnerParams.CreateScaleSetParams - if err := json.NewDecoder(r.Body).Decode(&scalesetData); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeOrganization, orgID, scalesetData) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating organization scale set") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSet); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /organizations/{orgID}/pools organizations pools ListOrgPools // // List organization pools. @@ -402,48 +329,6 @@ func (a *APIController) ListOrgPoolsHandler(w http.ResponseWriter, r *http.Reque } } -// swagger:route GET /organizations/{orgID}/scalesets organizations scalesets ListOrgScaleSets -// -// List organization scale sets. -// -// Parameters: -// + name: orgID -// description: Organization ID. -// type: string -// in: path -// required: true -// -// Responses: -// 200: ScaleSets -// default: APIErrorResponse -func (a *APIController) ListOrgScaleSetsHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - orgID, ok := vars["orgID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No org ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeOrganization, orgID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSets); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /organizations/{orgID}/pools/{poolID} organizations pools GetOrgPool // // Get organization pool by ID. diff --git a/apiserver/controllers/repositories.go b/apiserver/controllers/repositories.go index f3675790..7cc3c4f5 100644 --- a/apiserver/controllers/repositories.go +++ b/apiserver/controllers/repositories.go @@ -67,37 +67,13 @@ func (a *APIController) CreateRepoHandler(w http.ResponseWriter, r *http.Request // // List repositories. 
// -// Parameters: -// + name: owner -// description: Exact owner name to filter by -// type: string -// in: query -// required: false -// -// + name: name -// description: Exact repository name to filter by -// type: string -// in: query -// required: false -// -// + name: endpoint -// description: Exact endpoint name to filter by -// type: string -// in: query -// required: false -// // Responses: // 200: Repositories // default: APIErrorResponse func (a *APIController) ListReposHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - filter := runnerParams.RepositoryFilter{ - Name: r.URL.Query().Get("name"), - Owner: r.URL.Query().Get("owner"), - Endpoint: r.URL.Query().Get("endpoint"), - } - repos, err := a.r.ListRepositories(ctx, filter) + repos, err := a.r.ListRepositories(ctx) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing repositories") handleError(ctx, w, err) @@ -310,62 +286,6 @@ func (a *APIController) CreateRepoPoolHandler(w http.ResponseWriter, r *http.Req } } -// swagger:route POST /repositories/{repoID}/scalesets repositories scalesets CreateRepoScaleSet -// -// Create repository scale set with the parameters given. -// -// Parameters: -// + name: repoID -// description: Repository ID. -// type: string -// in: path -// required: true -// -// + name: Body -// description: Parameters used when creating the repository scale set. -// type: CreateScaleSetParams -// in: body -// required: true -// -// Responses: -// 200: ScaleSet -// default: APIErrorResponse -func (a *APIController) CreateRepoScaleSetHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - repoID, ok := vars["repoID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No repo ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - var scaleSetData runnerParams.CreateScaleSetParams - if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeRepository, repoID, scaleSetData) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating repository scale set") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSet); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /repositories/{repoID}/pools repositories pools ListRepoPools // // List repository pools. @@ -408,48 +328,6 @@ func (a *APIController) ListRepoPoolsHandler(w http.ResponseWriter, r *http.Requ } } -// swagger:route GET /repositories/{repoID}/scalesets repositories scalesets ListRepoScaleSets -// -// List repository scale sets. -// -// Parameters: -// + name: repoID -// description: Repository ID. 
-// type: string -// in: path -// required: true -// -// Responses: -// 200: ScaleSets -// default: APIErrorResponse -func (a *APIController) ListRepoScaleSetsHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - vars := mux.Vars(r) - repoID, ok := vars["repoID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No repo ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeRepository, repoID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSets); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - // swagger:route GET /repositories/{repoID}/pools/{poolID} repositories pools GetRepoPool // // Get repository pool by ID. diff --git a/apiserver/controllers/scalesets.go b/apiserver/controllers/scalesets.go deleted file mode 100644 index 1d26221b..00000000 --- a/apiserver/controllers/scalesets.go +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright 2022 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package controllers - -import ( - "encoding/json" - "log/slog" - "net/http" - "strconv" - - "github.com/gorilla/mux" - - gErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/apiserver/params" - runnerParams "github.com/cloudbase/garm/params" -) - -// swagger:route GET /scalesets scalesets ListScalesets -// -// List all scalesets. -// -// Responses: -// 200: ScaleSets -// default: APIErrorResponse -func (a *APIController) ListAllScaleSetsHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - scalesets, err := a.r.ListAllScaleSets(ctx) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scalesets); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route GET /scalesets/{scalesetID} scalesets GetScaleSet -// -// Get scale set by ID. -// -// Parameters: -// + name: scalesetID -// description: ID of the scale set to fetch. 
-// type: string -// in: path -// required: true -// -// Responses: -// 200: ScaleSet -// default: APIErrorResponse -func (a *APIController) GetScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - scaleSetID, ok := vars["scalesetID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No scale set ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - id, err := strconv.ParseUint(scaleSetID, 10, 32) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - scaleSet, err := a.r.GetScaleSetByID(ctx, uint(id)) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "fetching scale set") - handleError(ctx, w, err) - return - } - - scaleSet.RunnerBootstrapTimeout = scaleSet.RunnerTimeout() - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSet); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} - -// swagger:route DELETE /scalesets/{scalesetID} scalesets DeleteScaleSet -// -// Delete scale set by ID. -// -// Parameters: -// + name: scalesetID -// description: ID of the scale set to delete. -// type: string -// in: path -// required: true -// -// Responses: -// default: APIErrorResponse -func (a *APIController) DeleteScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - scalesetID, ok := vars["scalesetID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No scale set ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - id, err := strconv.ParseUint(scalesetID, 10, 32) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - if err := a.r.DeleteScaleSetByID(ctx, uint(id)); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "removing scale set") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) -} - -// swagger:route PUT /scalesets/{scalesetID} scalesets UpdateScaleSet -// -// Update scale set by ID. -// -// Parameters: -// + name: scalesetID -// description: ID of the scale set to update. -// type: string -// in: path -// required: true -// -// + name: Body -// description: Parameters to update the scale set with. 
-// type: UpdateScaleSetParams -// in: body -// required: true -// -// Responses: -// 200: ScaleSet -// default: APIErrorResponse -func (a *APIController) UpdateScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - vars := mux.Vars(r) - scalesetID, ok := vars["scalesetID"] - if !ok { - w.WriteHeader(http.StatusBadRequest) - if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ - Error: "Bad Request", - Details: "No scale set ID specified", - }); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } - return - } - - id, err := strconv.ParseUint(scalesetID, 10, 32) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - var scaleSetData runnerParams.UpdateScaleSetParams - if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") - handleError(ctx, w, gErrors.ErrBadRequest) - return - } - - scaleSet, err := a.r.UpdateScaleSetByID(ctx, uint(id), scaleSetData) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "updating scale set") - handleError(ctx, w, err) - return - } - - w.Header().Set("Content-Type", "application/json") - if err := json.NewEncoder(w).Encode(scaleSet); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") - } -} diff --git a/workers/websocket/events/events.go b/apiserver/events/events.go similarity index 88% rename from workers/websocket/events/events.go rename to apiserver/events/events.go index 94d707f2..30e0b386 100644 --- a/workers/websocket/events/events.go +++ b/apiserver/events/events.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package events import ( diff --git a/workers/websocket/events/params.go b/apiserver/events/params.go similarity index 71% rename from workers/websocket/events/params.go rename to apiserver/events/params.go index a2b996a9..274d3f1e 100644 --- a/workers/websocket/events/params.go +++ b/apiserver/events/params.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
 package events
 
 import (
@@ -27,7 +14,7 @@ func (f Filter) Validate() error {
 	case common.RepositoryEntityType, common.OrganizationEntityType, common.EnterpriseEntityType,
 		common.PoolEntityType, common.UserEntityType, common.InstanceEntityType,
 		common.JobEntityType, common.ControllerEntityType, common.GithubCredentialsEntityType,
-		common.GiteaCredentialsEntityType, common.ScaleSetEntityType, common.GithubEndpointEntityType:
+		common.GithubEndpointEntityType:
 	default:
 		return common.ErrInvalidEntityType
 	}
diff --git a/apiserver/params/params.go b/apiserver/params/params.go
index ec42fab6..7aee3bd2 100644
--- a/apiserver/params/params.go
+++ b/apiserver/params/params.go
@@ -14,7 +14,6 @@
 
 package params
 
-// swagger:model APIErrorResponse
 // APIErrorResponse holds information about an error, returned by the API
 type APIErrorResponse struct {
 	Error   string `json:"error"`
diff --git a/apiserver/routers/routers.go b/apiserver/routers/routers.go
index ff241165..8c0434bc 100644
--- a/apiserver/routers/routers.go
+++ b/apiserver/routers/routers.go
@@ -57,8 +57,6 @@ import (
 
 	"github.com/cloudbase/garm/apiserver/controllers"
 	"github.com/cloudbase/garm/auth"
-	"github.com/cloudbase/garm/config"
-	spaAssets "github.com/cloudbase/garm/webapp/assets"
 )
 
 func WithMetricsRouter(parentRouter *mux.Router, disableAuth bool, metricsMiddlerware auth.Middleware) *mux.Router {
@@ -84,30 +82,6 @@ func WithDebugServer(parentRouter *mux.Router) *mux.Router {
 	return parentRouter
 }
 
-func WithWebUI(parentRouter *mux.Router, apiConfig config.APIServer) *mux.Router {
-	if parentRouter == nil {
-		return nil
-	}
-
-	if apiConfig.WebUI.EnableWebUI {
-		slog.Info("WebUI is enabled, adding webapp routes")
-		webappPath := apiConfig.WebUI.GetWebappPath()
-		slog.Info("Using webapp path", "path", webappPath)
-		// Accessing / should redirect to the UI
-		parentRouter.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-			http.Redirect(w, r, webappPath, http.StatusMovedPermanently) // 301
-		})
-		// Serve the SPA with dynamic path
-		parentRouter.PathPrefix(webappPath).HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-			spaAssets.ServeSPAWithPath(w, r, webappPath)
-		}).Methods("GET")
-	} else {
-		slog.Info("WebUI is disabled, skipping webapp routes")
-	}
-
-	return parentRouter
-}
-
 func requestLogger(h http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		// gathers metrics from the upstream handlers
@@ -240,25 +214,6 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware
 	apiRouter.Handle("/pools/{poolID}/instances/", http.HandlerFunc(han.ListPoolInstancesHandler)).Methods("GET", "OPTIONS")
 	apiRouter.Handle("/pools/{poolID}/instances", http.HandlerFunc(han.ListPoolInstancesHandler)).Methods("GET", "OPTIONS")
 
-	////////////////
-	// Scale sets //
-	////////////////
-	// List all scale sets
-	apiRouter.Handle("/scalesets/", http.HandlerFunc(han.ListAllScaleSetsHandler)).Methods("GET", "OPTIONS")
-	apiRouter.Handle("/scalesets", http.HandlerFunc(han.ListAllScaleSetsHandler)).Methods("GET", "OPTIONS")
-	// Get one scale set
-	apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.GetScaleSetByIDHandler)).Methods("GET", "OPTIONS")
-	apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.GetScaleSetByIDHandler)).Methods("GET", "OPTIONS")
-	// Delete one scale set
-	apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.DeleteScaleSetByIDHandler)).Methods("DELETE", "OPTIONS")
-	apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.DeleteScaleSetByIDHandler)).Methods("DELETE", "OPTIONS")
-	// Update one scale set
-	apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.UpdateScaleSetByIDHandler)).Methods("PUT", "OPTIONS")
-	apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.UpdateScaleSetByIDHandler)).Methods("PUT", "OPTIONS")
-	// List scale set instances
-	apiRouter.Handle("/scalesets/{scalesetID}/instances/", http.HandlerFunc(han.ListScaleSetInstancesHandler)).Methods("GET", "OPTIONS")
-	apiRouter.Handle("/scalesets/{scalesetID}/instances", http.HandlerFunc(han.ListScaleSetInstancesHandler)).Methods("GET", "OPTIONS")
-
 	/////////////
 	// Runners //
 	/////////////
@@ -291,14 +246,6 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware
 	apiRouter.Handle("/repositories/{repoID}/pools/", http.HandlerFunc(han.CreateRepoPoolHandler)).Methods("POST", "OPTIONS")
 	apiRouter.Handle("/repositories/{repoID}/pools", http.HandlerFunc(han.CreateRepoPoolHandler)).Methods("POST", "OPTIONS")
 
-	// Create scale set
-	apiRouter.Handle("/repositories/{repoID}/scalesets/", http.HandlerFunc(han.CreateRepoScaleSetHandler)).Methods("POST", "OPTIONS")
-	apiRouter.Handle("/repositories/{repoID}/scalesets", http.HandlerFunc(han.CreateRepoScaleSetHandler)).Methods("POST", "OPTIONS")
-
-	// List scale sets
-	apiRouter.Handle("/repositories/{repoID}/scalesets/", http.HandlerFunc(han.ListRepoScaleSetsHandler)).Methods("GET", "OPTIONS")
-	apiRouter.Handle("/repositories/{repoID}/scalesets", http.HandlerFunc(han.ListRepoScaleSetsHandler)).Methods("GET", "OPTIONS")
-
 	// Repo instances list
 	apiRouter.Handle("/repositories/{repoID}/instances/", http.HandlerFunc(han.ListRepoInstancesHandler)).Methods("GET", "OPTIONS")
 	apiRouter.Handle("/repositories/{repoID}/instances", http.HandlerFunc(han.ListRepoInstancesHandler)).Methods("GET", "OPTIONS")
@@ -349,14 +296,6 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware
 	apiRouter.Handle("/organizations/{orgID}/pools/", http.HandlerFunc(han.CreateOrgPoolHandler)).Methods("POST", "OPTIONS")
 	apiRouter.Handle("/organizations/{orgID}/pools", http.HandlerFunc(han.CreateOrgPoolHandler)).Methods("POST", "OPTIONS")
 
-	// Create org scale set
-	apiRouter.Handle("/organizations/{orgID}/scalesets/", http.HandlerFunc(han.CreateOrgScaleSetHandler)).Methods("POST", "OPTIONS")
-	apiRouter.Handle("/organizations/{orgID}/scalesets", http.HandlerFunc(han.CreateOrgScaleSetHandler)).Methods("POST", "OPTIONS")
-
-	// List org scale sets
-	apiRouter.Handle("/organizations/{orgID}/scalesets/", http.HandlerFunc(han.ListOrgScaleSetsHandler)).Methods("GET", "OPTIONS")
-	apiRouter.Handle("/organizations/{orgID}/scalesets", http.HandlerFunc(han.ListOrgScaleSetsHandler)).Methods("GET", "OPTIONS")
-
 	// Org instances list
 	apiRouter.Handle("/organizations/{orgID}/instances/", http.HandlerFunc(han.ListOrgInstancesHandler)).Methods("GET", "OPTIONS")
 	apiRouter.Handle("/organizations/{orgID}/instances", http.HandlerFunc(han.ListOrgInstancesHandler)).Methods("GET", "OPTIONS")
@@ -407,14 +346,6 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware
 	apiRouter.Handle("/enterprises/{enterpriseID}/pools/", http.HandlerFunc(han.CreateEnterprisePoolHandler)).Methods("POST", "OPTIONS")
 	apiRouter.Handle("/enterprises/{enterpriseID}/pools", http.HandlerFunc(han.CreateEnterprisePoolHandler)).Methods("POST", "OPTIONS")
 
-	// Create enterprise scale sets
-	apiRouter.Handle("/enterprises/{enterpriseID}/scalesets/", 
http.HandlerFunc(han.CreateEnterpriseScaleSetHandler)).Methods("POST", "OPTIONS") - apiRouter.Handle("/enterprises/{enterpriseID}/scalesets", http.HandlerFunc(han.CreateEnterpriseScaleSetHandler)).Methods("POST", "OPTIONS") - - // List enterprise scale sets - apiRouter.Handle("/enterprises/{enterpriseID}/scalesets/", http.HandlerFunc(han.ListEnterpriseScaleSetsHandler)).Methods("GET", "OPTIONS") - apiRouter.Handle("/enterprises/{enterpriseID}/scalesets", http.HandlerFunc(han.ListEnterpriseScaleSetsHandler)).Methods("GET", "OPTIONS") - // Enterprise instances list apiRouter.Handle("/enterprises/{enterpriseID}/instances/", http.HandlerFunc(han.ListEnterpriseInstancesHandler)).Methods("GET", "OPTIONS") apiRouter.Handle("/enterprises/{enterpriseID}/instances", http.HandlerFunc(han.ListEnterpriseInstancesHandler)).Methods("GET", "OPTIONS") @@ -480,44 +411,6 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/github/credentials/{id}/", http.HandlerFunc(han.UpdateGithubCredential)).Methods("PUT", "OPTIONS") apiRouter.Handle("/github/credentials/{id}", http.HandlerFunc(han.UpdateGithubCredential)).Methods("PUT", "OPTIONS") - ////////////////////// - // Gitea Endpoints // - ////////////////////// - // Create Gitea Endpoint - apiRouter.Handle("/gitea/endpoints/", http.HandlerFunc(han.CreateGiteaEndpoint)).Methods("POST", "OPTIONS") - apiRouter.Handle("/gitea/endpoints", http.HandlerFunc(han.CreateGiteaEndpoint)).Methods("POST", "OPTIONS") - // List Gitea Endpoints - apiRouter.Handle("/gitea/endpoints/", http.HandlerFunc(han.ListGiteaEndpoints)).Methods("GET", "OPTIONS") - apiRouter.Handle("/gitea/endpoints", http.HandlerFunc(han.ListGiteaEndpoints)).Methods("GET", "OPTIONS") - // Get Gitea Endpoint - apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.GetGiteaEndpoint)).Methods("GET", "OPTIONS") - apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.GetGiteaEndpoint)).Methods("GET", "OPTIONS") - // Delete Gitea Endpoint - apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.DeleteGiteaEndpoint)).Methods("DELETE", "OPTIONS") - apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.DeleteGiteaEndpoint)).Methods("DELETE", "OPTIONS") - // Update Gitea Endpoint - apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.UpdateGiteaEndpoint)).Methods("PUT", "OPTIONS") - apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.UpdateGiteaEndpoint)).Methods("PUT", "OPTIONS") - - //////////////////////// - // Gitea credentials // - //////////////////////// - // List Gitea Credentials - apiRouter.Handle("/gitea/credentials/", http.HandlerFunc(han.ListGiteaCredentials)).Methods("GET", "OPTIONS") - apiRouter.Handle("/gitea/credentials", http.HandlerFunc(han.ListGiteaCredentials)).Methods("GET", "OPTIONS") - // Create Gitea Credentials - apiRouter.Handle("/gitea/credentials/", http.HandlerFunc(han.CreateGiteaCredential)).Methods("POST", "OPTIONS") - apiRouter.Handle("/gitea/credentials", http.HandlerFunc(han.CreateGiteaCredential)).Methods("POST", "OPTIONS") - // Get Gitea Credential - apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.GetGiteaCredential)).Methods("GET", "OPTIONS") - apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.GetGiteaCredential)).Methods("GET", "OPTIONS") - // Delete Gitea Credential - apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.DeleteGiteaCredential)).Methods("DELETE", "OPTIONS") - 
apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.DeleteGiteaCredential)).Methods("DELETE", "OPTIONS") - // Update Gitea Credential - apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.UpdateGiteaCredential)).Methods("PUT", "OPTIONS") - apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.UpdateGiteaCredential)).Methods("PUT", "OPTIONS") - ///////////////////////// // Websocket endpoints // ///////////////////////// @@ -531,7 +424,7 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/ws/events/", http.HandlerFunc(han.EventsHandler)).Methods("GET") apiRouter.Handle("/ws/events", http.HandlerFunc(han.EventsHandler)).Methods("GET") - // NotFound handler - this should be last + // NotFound handler apiRouter.PathPrefix("/").HandlerFunc(han.NotFoundHandler).Methods("GET", "POST", "PUT", "DELETE", "OPTIONS") return router } diff --git a/apiserver/swagger-models.yaml b/apiserver/swagger-models.yaml index 74eaac84..88c6bd8d 100644 --- a/apiserver/swagger-models.yaml +++ b/apiserver/swagger-models.yaml @@ -74,11 +74,11 @@ definitions: package: github.com/cloudbase/garm/params alias: garm_params items: - $ref: '#/definitions/ForgeCredentials' - ForgeCredentials: + $ref: '#/definitions/GithubCredentials' + GithubCredentials: type: object x-go-type: - type: ForgeCredentials + type: GithubCredentials import: package: github.com/cloudbase/garm/params alias: garm_params @@ -130,22 +130,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - ScaleSets: - type: array - x-go-type: - type: ScaleSets - import: - package: github.com/cloudbase/garm/params - alias: garm_params - items: - $ref: '#/definitions/ScaleSet' - ScaleSet: - type: object - x-go-type: - type: ScaleSet - import: - package: github.com/cloudbase/garm/params - alias: garm_params Repositories: type: array x-go-type: @@ -229,13 +213,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - CreateScaleSetParams: - type: object - x-go-type: - type: CreateScaleSetParams - import: - package: github.com/cloudbase/garm/params - alias: garm_params UpdatePoolParams: type: object x-go-type: @@ -243,13 +220,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - UpdateScaleSetParams: - type: object - x-go-type: - type: UpdateScaleSetParams - import: - package: github.com/cloudbase/garm/params - alias: garm_params APIErrorResponse: type: object x-go-type: @@ -271,29 +241,22 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - UpdateGiteaEndpointParams: + GithubEndpoint: type: object x-go-type: - type: UpdateGiteaEndpointParams + type: GithubEndpoint import: package: github.com/cloudbase/garm/params alias: garm_params - ForgeEndpoint: - type: object - x-go-type: - type: ForgeEndpoint - import: - package: github.com/cloudbase/garm/params - alias: garm_params - ForgeEndpoints: + GithubEndpoints: type: array x-go-type: - type: ForgeEndpoints + type: GithubEndpoints import: package: github.com/cloudbase/garm/params alias: garm_params items: - $ref: '#/definitions/ForgeEndpoint' + $ref: '#/definitions/GithubEndpoint' CreateGithubEndpointParams: type: object x-go-type: @@ -301,13 +264,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - CreateGiteaEndpointParams: - type: object - x-go-type: - type: CreateGiteaEndpointParams - import: - package: github.com/cloudbase/garm/params - alias: garm_params 
CreateGithubCredentialsParams: type: object x-go-type: @@ -315,13 +271,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - CreateGiteaCredentialsParams: - type: object - x-go-type: - type: CreateGiteaCredentialsParams - import: - package: github.com/cloudbase/garm/params - alias: garm_params UpdateGithubCredentialsParams: type: object x-go-type: @@ -329,13 +278,6 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - UpdateGiteaCredentialsParams: - type: object - x-go-type: - type: UpdateGiteaCredentialsParams - import: - package: github.com/cloudbase/garm/params - alias: garm_params UpdateControllerParams: type: object x-go-type: diff --git a/apiserver/swagger.yaml b/apiserver/swagger.yaml index bf02a2d7..42c573f0 100644 --- a/apiserver/swagger.yaml +++ b/apiserver/swagger.yaml @@ -23,20 +23,6 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: CreateEnterpriseParams - CreateGiteaCredentialsParams: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: CreateGiteaCredentialsParams - CreateGiteaEndpointParams: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: CreateGiteaEndpointParams CreateGithubCredentialsParams: type: object x-go-type: @@ -79,16 +65,9 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: CreateRepoParams - CreateScaleSetParams: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: CreateScaleSetParams Credentials: items: - $ref: '#/definitions/ForgeCredentials' + $ref: '#/definitions/GithubCredentials' type: array x-go-type: import: @@ -111,29 +90,29 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: Enterprises - ForgeCredentials: + GithubCredentials: type: object x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: ForgeCredentials - ForgeEndpoint: + type: GithubCredentials + GithubEndpoint: type: object x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: ForgeEndpoint - ForgeEndpoints: + type: GithubEndpoint + GithubEndpoints: items: - $ref: '#/definitions/ForgeEndpoint' + $ref: '#/definitions/GithubEndpoint' type: array x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: ForgeEndpoints + type: GithubEndpoints HookInfo: type: object x-go-type: @@ -265,22 +244,6 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: Repository - ScaleSet: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: ScaleSet - ScaleSets: - items: - $ref: '#/definitions/ScaleSet' - type: array - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: ScaleSets UpdateControllerParams: type: object x-go-type: @@ -295,20 +258,6 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: UpdateEntityParams - UpdateGiteaCredentialsParams: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: UpdateGiteaCredentialsParams - UpdateGiteaEndpointParams: - type: object - x-go-type: - import: - alias: garm_params - package: github.com/cloudbase/garm/params - type: UpdateGiteaEndpointParams UpdateGithubCredentialsParams: type: object x-go-type: @@ -330,13 +279,6 @@ 
definitions:
         alias: garm_params
         package: github.com/cloudbase/garm/params
       type: UpdatePoolParams
-  UpdateScaleSetParams:
-    type: object
-    x-go-type:
-      import:
-        alias: garm_params
-        package: github.com/cloudbase/garm/params
-      type: UpdateScaleSetParams
   User:
     type: object
     x-go-type:
@@ -418,15 +360,6 @@ paths:
   /enterprises:
     get:
       operationId: ListEnterprises
-      parameters:
-      - description: Exact enterprise name to filter by
-        in: query
-        name: name
-        type: string
-      - description: Exact endpoint name to filter by
-        in: query
-        name: endpoint
-        type: string
       responses:
         "200":
           description: Enterprises
@@ -683,57 +616,6 @@ paths:
       tags:
       - enterprises
       - pools
-  /enterprises/{enterpriseID}/scalesets:
-    get:
-      operationId: ListEnterpriseScaleSets
-      parameters:
-      - description: Enterprise ID.
-        in: path
-        name: enterpriseID
-        required: true
-        type: string
-      responses:
-        "200":
-          description: ScaleSets
-          schema:
-            $ref: '#/definitions/ScaleSets'
-        default:
-          description: APIErrorResponse
-          schema:
-            $ref: '#/definitions/APIErrorResponse'
-      summary: List enterprise scale sets.
-      tags:
-      - enterprises
-      - scalesets
-    post:
-      operationId: CreateEnterpriseScaleSet
-      parameters:
-      - description: Enterprise ID.
-        in: path
-        name: enterpriseID
-        required: true
-        type: string
-      - description: Parameters used when creating the enterprise scale set.
-        in: body
-        name: Body
-        required: true
-        schema:
-          $ref: '#/definitions/CreateScaleSetParams'
-          description: Parameters used when creating the enterprise scale set.
-          type: object
-      responses:
-        "200":
-          description: ScaleSet
-          schema:
-            $ref: '#/definitions/ScaleSet'
-        default:
-          description: APIErrorResponse
-          schema:
-            $ref: '#/definitions/APIErrorResponse'
-      summary: Create enterprise scale set with the parameters given.
-      tags:
-      - enterprises
-      - scalesets
   /first-run:
     post:
       operationId: FirstRun
@@ -758,212 +640,6 @@ paths:
       summary: Initialize the first run of the controller.
       tags:
      - first-run
-  /gitea/credentials:
-    get:
-      operationId: ListGiteaCredentials
-      responses:
-        "200":
-          description: Credentials
-          schema:
-            $ref: '#/definitions/Credentials'
-        "400":
-          description: APIErrorResponse
-          schema:
-            $ref: '#/definitions/APIErrorResponse'
-      summary: List all credentials.
-      tags:
-      - credentials
-    post:
-      operationId: CreateGiteaCredentials
-      parameters:
-      - description: Parameters used when creating a Gitea credential.
-        in: body
-        name: Body
-        required: true
-        schema:
-          $ref: '#/definitions/CreateGiteaCredentialsParams'
-          description: Parameters used when creating a Gitea credential.
-          type: object
-      responses:
-        "200":
-          description: ForgeCredentials
-          schema:
-            $ref: '#/definitions/ForgeCredentials'
-        "400":
-          description: APIErrorResponse
-          schema:
-            $ref: '#/definitions/APIErrorResponse'
-      summary: Create a Gitea credential.
-      tags:
-      - credentials
-  /gitea/credentials/{id}:
-    delete:
-      operationId: DeleteGiteaCredentials
-      parameters:
-      - description: ID of the Gitea credential.
-        in: path
-        name: id
-        required: true
-        type: integer
-      responses:
-        default:
-          description: APIErrorResponse
-          schema:
-            $ref: '#/definitions/APIErrorResponse'
-      summary: Delete a Gitea credential.
-      tags:
-      - credentials
-    get:
-      operationId: GetGiteaCredentials
-      parameters:
-      - description: ID of the Gitea credential.
- in: path - name: id - required: true - type: integer - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a Gitea credential. - tags: - - credentials - put: - operationId: UpdateGiteaCredentials - parameters: - - description: ID of the Gitea credential. - in: path - name: id - required: true - type: integer - - description: Parameters used when updating a Gitea credential. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGiteaCredentialsParams' - description: Parameters used when updating a Gitea credential. - type: object - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a Gitea credential. - tags: - - credentials - /gitea/endpoints: - get: - operationId: ListGiteaEndpoints - responses: - "200": - description: ForgeEndpoints - schema: - $ref: '#/definitions/ForgeEndpoints' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all Gitea Endpoints. - tags: - - endpoints - post: - operationId: CreateGiteaEndpoint - parameters: - - description: Parameters used when creating a Gitea endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateGiteaEndpointParams' - description: Parameters used when creating a Gitea endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create a Gitea Endpoint. - tags: - - endpoints - /gitea/endpoints/{name}: - delete: - operationId: DeleteGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete a Gitea Endpoint. - tags: - - endpoints - get: - operationId: GetGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a Gitea Endpoint. - tags: - - endpoints - put: - operationId: UpdateGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - - description: Parameters used when updating a Gitea endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGiteaEndpointParams' - description: Parameters used when updating a Gitea endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a Gitea Endpoint. 
- tags: - - endpoints /github/credentials: get: operationId: ListCredentials @@ -992,9 +668,9 @@ paths: type: object responses: "200": - description: ForgeCredentials + description: GithubCredentials schema: - $ref: '#/definitions/ForgeCredentials' + $ref: '#/definitions/GithubCredentials' "400": description: APIErrorResponse schema: @@ -1029,9 +705,9 @@ paths: type: integer responses: "200": - description: ForgeCredentials + description: GithubCredentials schema: - $ref: '#/definitions/ForgeCredentials' + $ref: '#/definitions/GithubCredentials' "400": description: APIErrorResponse schema: @@ -1057,9 +733,9 @@ paths: type: object responses: "200": - description: ForgeCredentials + description: GithubCredentials schema: - $ref: '#/definitions/ForgeCredentials' + $ref: '#/definitions/GithubCredentials' "400": description: APIErrorResponse schema: @@ -1072,9 +748,9 @@ paths: operationId: ListGithubEndpoints responses: "200": - description: ForgeEndpoints + description: GithubEndpoints schema: - $ref: '#/definitions/ForgeEndpoints' + $ref: '#/definitions/GithubEndpoints' default: description: APIErrorResponse schema: @@ -1095,9 +771,9 @@ paths: type: object responses: "200": - description: ForgeEndpoint + description: GithubEndpoint schema: - $ref: '#/definitions/ForgeEndpoint' + $ref: '#/definitions/GithubEndpoint' default: description: APIErrorResponse schema: @@ -1132,9 +808,9 @@ paths: type: string responses: "200": - description: ForgeEndpoint + description: GithubEndpoint schema: - $ref: '#/definitions/ForgeEndpoint' + $ref: '#/definitions/GithubEndpoint' default: description: APIErrorResponse schema: @@ -1160,9 +836,9 @@ paths: type: object responses: "200": - description: ForgeEndpoint + description: GithubEndpoint schema: - $ref: '#/definitions/ForgeEndpoint' + $ref: '#/definitions/GithubEndpoint' default: description: APIErrorResponse schema: @@ -1263,15 +939,6 @@ paths: /organizations: get: operationId: ListOrgs - parameters: - - description: Exact organization name to filter by - in: query - name: name - type: string - - description: Exact endpoint name to filter by - in: query - name: endpoint - type: string responses: "200": description: Organizations @@ -1532,57 +1199,6 @@ paths: tags: - organizations - pools - /organizations/{orgID}/scalesets: - get: - operationId: ListOrgScaleSets - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List organization scale sets. - tags: - - organizations - - scalesets - post: - operationId: CreateOrgScaleSet - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: Parameters used when creating the organization scale set. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateScaleSetParams' - description: Parameters used when creating the organization scale set. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create organization scale set with the parameters given. 
- tags: - - organizations - - scalesets /organizations/{orgID}/webhook: delete: operationId: UninstallOrgWebhook @@ -1772,19 +1388,6 @@ paths: /repositories: get: operationId: ListRepos - parameters: - - description: Exact owner name to filter by - in: query - name: owner - type: string - - description: Exact repository name to filter by - in: query - name: name - type: string - - description: Exact endpoint name to filter by - in: query - name: endpoint - type: string responses: "200": description: Repositories @@ -2045,57 +1648,6 @@ paths: tags: - repositories - pools - /repositories/{repoID}/scalesets: - get: - operationId: ListRepoScaleSets - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List repository scale sets. - tags: - - repositories - - scalesets - post: - operationId: CreateRepoScaleSet - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: Parameters used when creating the repository scale set. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateScaleSetParams' - description: Parameters used when creating the repository scale set. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create repository scale set with the parameters given. - tags: - - repositories - - scalesets /repositories/{repoID}/webhook: delete: operationId: UninstallRepoWebhook @@ -2166,107 +1718,6 @@ paths: tags: - repositories - hooks - /scalesets: - get: - operationId: ListScalesets - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all scalesets. - tags: - - scalesets - /scalesets/{scalesetID}: - delete: - operationId: DeleteScaleSet - parameters: - - description: ID of the scale set to delete. - in: path - name: scalesetID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete scale set by ID. - tags: - - scalesets - get: - operationId: GetScaleSet - parameters: - - description: ID of the scale set to fetch. - in: path - name: scalesetID - required: true - type: string - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get scale set by ID. - tags: - - scalesets - put: - operationId: UpdateScaleSet - parameters: - - description: ID of the scale set to update. - in: path - name: scalesetID - required: true - type: string - - description: Parameters to update the scale set with. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateScaleSetParams' - description: Parameters to update the scale set with. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update scale set by ID. 
- tags: - - scalesets - /scalesets/{scalesetID}/instances: - get: - operationId: ListScaleSetInstances - parameters: - - description: Runner scale set ID. - in: path - name: scalesetID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List runner instances in a scale set. - tags: - - instances produces: - application/json security: diff --git a/auth/admin_required.go b/auth/admin_required.go index b3ca3624..8ab6cbac 100644 --- a/auth/admin_required.go +++ b/auth/admin_required.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package auth import "net/http" diff --git a/auth/auth.go b/auth/auth.go index c5fa1ebd..7dfabcf0 100644 --- a/auth/auth.go +++ b/auth/auth.go @@ -16,12 +16,11 @@ package auth import ( "context" - "errors" - "fmt" "time" jwt "github.com/golang-jwt/jwt/v5" "github.com/nbutton23/zxcvbn-go" + "github.com/pkg/errors" "golang.org/x/crypto/bcrypt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -50,7 +49,7 @@ func (a *Authenticator) IsInitialized() bool { func (a *Authenticator) GetJWTToken(ctx context.Context) (string, error) { tokenID, err := util.GetRandomString(16) if err != nil { - return "", fmt.Errorf("error generating random string: %w", err) + return "", errors.Wrap(err, "generating random string") } expireToken := time.Now().Add(a.cfg.TimeToLive.Duration()) expires := &jwt.NumericDate{ @@ -73,7 +72,7 @@ func (a *Authenticator) GetJWTToken(ctx context.Context) (string, error) { token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(a.cfg.Secret)) if err != nil { - return "", fmt.Errorf("error fetching token string: %w", err) + return "", errors.Wrap(err, "fetching token string") } return tokenString, nil @@ -88,7 +87,7 @@ func (a *Authenticator) GetJWTMetricsToken(ctx context.Context) (string, error) tokenID, err := util.GetRandomString(16) if err != nil { - return "", fmt.Errorf("error generating random string: %w", err) + return "", errors.Wrap(err, "generating random string") } // nolint:golangci-lint,godox // TODO: currently this is the same TTL as the normal Token @@ -112,7 +111,7 @@ func (a *Authenticator) GetJWTMetricsToken(ctx context.Context) (string, error) token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(a.cfg.Secret)) if err != nil { - return "", fmt.Errorf("error fetching token string: %w", err) + return "", errors.Wrap(err, "fetching token string") } return tokenString, nil @@ -122,7 +121,7 @@ func (a *Authenticator) InitController(ctx context.Context, param params.NewUser _, err := a.store.ControllerInfo() if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.User{}, fmt.Errorf("error initializing controller: %w", err) + return params.User{}, errors.Wrap(err, 
"initializing controller") } } if a.store.HasAdminUser(ctx) { @@ -152,7 +151,7 @@ func (a *Authenticator) InitController(ctx context.Context, param params.NewUser hashed, err := util.PaswsordToBcrypt(param.Password) if err != nil { - return params.User{}, fmt.Errorf("error creating user: %w", err) + return params.User{}, errors.Wrap(err, "creating user") } param.Password = hashed @@ -170,7 +169,7 @@ func (a *Authenticator) AuthenticateUser(ctx context.Context, info params.Passwo if errors.Is(err, runnerErrors.ErrNotFound) { return ctx, runnerErrors.ErrUnauthorized } - return ctx, fmt.Errorf("error authenticating: %w", err) + return ctx, errors.Wrap(err, "authenticating") } if !user.Enabled { diff --git a/auth/context.go b/auth/context.go index 1b648bb6..0d95be56 100644 --- a/auth/context.go +++ b/auth/context.go @@ -44,21 +44,8 @@ const ( instanceTokenFetched contextFlags = "tokenFetched" instanceHasJITConfig contextFlags = "hasJITConfig" instanceParams contextFlags = "instanceParams" - instanceForgeTypeKey contextFlags = "forge_type" ) -func SetInstanceForgeType(ctx context.Context, val string) context.Context { - return context.WithValue(ctx, instanceForgeTypeKey, val) -} - -func InstanceForgeType(ctx context.Context) params.EndpointType { - elem := ctx.Value(instanceForgeTypeKey) - if elem == nil { - return "" - } - return elem.(params.EndpointType) -} - func SetInstanceID(ctx context.Context, id string) context.Context { return context.WithValue(ctx, instanceIDKey, id) } @@ -172,7 +159,7 @@ func InstanceEntity(ctx context.Context) string { return elem.(string) } -func PopulateInstanceContext(ctx context.Context, instance params.Instance, claims *InstanceJWTClaims) context.Context { +func PopulateInstanceContext(ctx context.Context, instance params.Instance) context.Context { ctx = SetInstanceID(ctx, instance.ID) ctx = SetInstanceName(ctx, instance.Name) ctx = SetInstancePoolID(ctx, instance.PoolID) @@ -180,7 +167,6 @@ func PopulateInstanceContext(ctx context.Context, instance params.Instance, clai ctx = SetInstanceTokenFetched(ctx, instance.TokenFetched) ctx = SetInstanceHasJITConfig(ctx, instance.JitConfiguration) ctx = SetInstanceParams(ctx, instance) - ctx = SetInstanceForgeType(ctx, claims.ForgeType) return ctx } diff --git a/auth/init_required.go b/auth/init_required.go index 3ef31d70..2d3e1715 100644 --- a/auth/init_required.go +++ b/auth/init_required.go @@ -38,8 +38,8 @@ type initRequired struct { func (i *initRequired) Middleware(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - - if !i.store.HasAdminUser(ctx) { + ctrlInfo, err := i.store.ControllerInfo() + if err != nil || ctrlInfo.ControllerID.String() == "" { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) if err := json.NewEncoder(w).Encode(params.InitializationRequired); err != nil { diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index 6d1d66e4..c21be3e7 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -18,12 +18,12 @@ import ( "context" "fmt" "log/slog" - "math" "net/http" "strings" "time" jwt "github.com/golang-jwt/jwt/v5" + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -39,11 +39,10 @@ type InstanceJWTClaims struct { Name string `json:"name"` PoolID string `json:"provider_id"` // Scope is either repository or organization - Scope 
params.ForgeEntityType `json:"scope"` + Scope params.GithubEntityType `json:"scope"` // Entity is the repo or org name Entity string `json:"entity"` CreateAttempt int `json:"create_attempt"` - ForgeType string `json:"forge_type"` jwt.RegisteredClaims } @@ -60,17 +59,11 @@ type instanceToken struct { jwtSecret string } -func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity params.ForgeEntity, entityType params.ForgeEntityType, ttlMinutes uint) (string, error) { +func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, poolType params.GithubEntityType, ttlMinutes uint) (string, error) { // Token expiration is equal to the bootstrap timeout set on the pool plus the polling // interval garm uses to check for timed out runners. Runners that have not sent their info // by the end of this interval are most likely failed and will be reaped by garm anyway. - var ttl int - if ttlMinutes > math.MaxInt { - ttl = math.MaxInt - } else { - ttl = int(ttlMinutes) - } - expireToken := time.Now().Add(time.Duration(ttl)*time.Minute + common.PoolReapTimeoutInterval) + expireToken := time.Now().Add(time.Duration(ttlMinutes)*time.Minute + common.PoolReapTimeoutInterval) expires := &jwt.NumericDate{ Time: expireToken, } @@ -82,15 +75,14 @@ func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity par ID: instance.ID, Name: instance.Name, PoolID: instance.PoolID, - Scope: entityType, - Entity: entity.String(), - ForgeType: string(entity.Credentials.ForgeType), + Scope: poolType, + Entity: entity, CreateAttempt: instance.CreateAttempt, } token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(i.jwtSecret)) if err != nil { - return "", fmt.Errorf("error signing token: %w", err) + return "", errors.Wrap(err, "signing token") } return tokenString, nil @@ -120,12 +112,12 @@ func (amw *instanceMiddleware) claimsToContext(ctx context.Context, claims *Inst return nil, runnerErrors.ErrUnauthorized } - instanceInfo, err := amw.store.GetInstance(ctx, claims.Name) + instanceInfo, err := amw.store.GetInstanceByName(ctx, claims.Name) if err != nil { return ctx, runnerErrors.ErrUnauthorized } - ctx = PopulateInstanceContext(ctx, instanceInfo, claims) + ctx = PopulateInstanceContext(ctx, instanceInfo) return ctx, nil } diff --git a/auth/interfaces.go b/auth/interfaces.go index ab68dbd7..4e4d370c 100644 --- a/auth/interfaces.go +++ b/auth/interfaces.go @@ -26,5 +26,5 @@ type Middleware interface { } type InstanceTokenGetter interface { - NewInstanceJWTToken(instance params.Instance, entity params.ForgeEntity, poolType params.ForgeEntityType, ttlMinutes uint) (string, error) + NewInstanceJWTToken(instance params.Instance, entity string, poolType params.GithubEntityType, ttlMinutes uint) (string, error) } diff --git a/auth/jwt.go b/auth/jwt.go index 52fce0c9..e9b5745f 100644 --- a/auth/jwt.go +++ b/auth/jwt.go @@ -97,37 +97,26 @@ func invalidAuthResponse(ctx context.Context, w http.ResponseWriter) { } } -func (amw *jwtMiddleware) getTokenFromRequest(r *http.Request) (string, error) { - authorizationHeader := r.Header.Get("authorization") - if authorizationHeader == "" { - cookie, err := r.Cookie("garm_token") - if err != nil { - return "", fmt.Errorf("failed to get cookie: %w", err) - } - return cookie.Value, nil - } - - bearerToken := strings.Split(authorizationHeader, " ") - if len(bearerToken) != 2 { - return "", fmt.Errorf("invalid auth header") - } - return bearerToken[1], nil -} - // Middleware implements the 
middleware interface func (amw *jwtMiddleware) Middleware(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // nolint:golangci-lint,godox // TODO: Log error details when authentication fails ctx := r.Context() - authToken, err := amw.getTokenFromRequest(r) - if err != nil { - slog.ErrorContext(ctx, "failed to get auth token", "error", err) + authorizationHeader := r.Header.Get("authorization") + if authorizationHeader == "" { invalidAuthResponse(ctx, w) return } + + bearerToken := strings.Split(authorizationHeader, " ") + if len(bearerToken) != 2 { + invalidAuthResponse(ctx, w) + return + } + claims := &JWTClaims{} - token, err := jwt.ParseWithClaims(authToken, claims, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.ParseWithClaims(bearerToken[1], claims, func(token *jwt.Token) (interface{}, error) { if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, fmt.Errorf("invalid signing method") } diff --git a/auth/metrics.go b/auth/metrics.go index 5ea688e2..55cede44 100644 --- a/auth/metrics.go +++ b/auth/metrics.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package auth import ( diff --git a/build-webapp.sh b/build-webapp.sh deleted file mode 100755 index 01b13c04..00000000 --- a/build-webapp.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -set -e - -echo "Building GARM SPA (SvelteKit)..." - -# Navigate to webapp directory -cd webapp - -# Install dependencies if node_modules doesn't exist -npm install - -# Build the SPA -echo "Building SPA..." -npm run build -echo "SPA built successfully!" diff --git a/cache/cache_test.go b/cache/cache_test.go deleted file mode 100644 index 7a8ebed3..00000000 --- a/cache/cache_test.go +++ /dev/null @@ -1,1040 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
-package cache
-
-import (
-	"testing"
-	"time"
-
-	"github.com/stretchr/testify/suite"
-
-	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	garmTesting "github.com/cloudbase/garm/internal/testing"
-	"github.com/cloudbase/garm/params"
-)
-
-type CacheTestSuite struct {
-	suite.Suite
-	entity params.ForgeEntity
-}
-
-func (c *CacheTestSuite) SetupTest() {
-	c.entity = params.ForgeEntity{
-		ID: "1234",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: params.ForgeCredentials{
-			ID: 1,
-			Name: "test",
-			ForgeType: params.GithubEndpointType,
-		},
-	}
-}
-
-func (c *CacheTestSuite) TearDownTest() {
-	// Clean up the cache after each test
-	githubToolsCache.mux.Lock()
-	defer githubToolsCache.mux.Unlock()
-	githubToolsCache.entities = make(map[string]GithubEntityTools)
-	giteaCredentialsCache.cache = make(map[uint]params.ForgeCredentials)
-	credentialsCache.cache = make(map[uint]params.ForgeCredentials)
-	instanceCache.cache = make(map[string]params.Instance)
-	entityCache = &EntityCache{
-		entities: make(map[string]EntityItem),
-	}
-}
-
-func (c *CacheTestSuite) TestCacheIsInitialized() {
-	c.Require().NotNil(githubToolsCache)
-	c.Require().NotNil(credentialsCache)
-	c.Require().NotNil(instanceCache)
-	c.Require().NotNil(entityCache)
-}
-
-func (c *CacheTestSuite) TestSetToolsCacheWorks() {
-	tools := []commonParams.RunnerApplicationDownload{
-		{
-			DownloadURL: garmTesting.Ptr("https://example.com"),
-		},
-	}
-	c.Require().NotNil(githubToolsCache)
-	c.Require().Len(githubToolsCache.entities, 0)
-	SetGithubToolsCache(c.entity, tools)
-	c.Require().Len(githubToolsCache.entities, 1)
-	cachedTools, err := GetGithubToolsCache(c.entity.ID)
-	c.Require().NoError(err)
-	c.Require().Len(cachedTools, 1)
-	c.Require().Equal(tools[0].GetDownloadURL(), cachedTools[0].GetDownloadURL())
-}
-
-func (c *CacheTestSuite) TestSetToolsCacheWithError() {
-	tools := []commonParams.RunnerApplicationDownload{
-		{
-			DownloadURL: garmTesting.Ptr("https://example.com"),
-		},
-	}
-	c.Require().NotNil(githubToolsCache)
-	c.Require().Len(githubToolsCache.entities, 0)
-	SetGithubToolsCache(c.entity, tools)
-	entity := githubToolsCache.entities[c.entity.ID]
-
-	c.Require().Equal(int64(entity.expiresAt.Sub(entity.updatedAt).Minutes()), int64(60))
-	c.Require().Len(githubToolsCache.entities, 1)
-	SetGithubToolsCacheError(c.entity, runnerErrors.ErrNotFound)
-
-	cachedTools, err := GetGithubToolsCache(c.entity.ID)
-	c.Require().Error(err)
-	c.Require().Nil(cachedTools)
-}
-
-func (c *CacheTestSuite) TestSetErrorOnNonExistingCacheEntity() {
-	entity := params.ForgeEntity{
-		ID: "non-existing-entity",
-	}
-	c.Require().NotNil(githubToolsCache)
-	c.Require().Len(githubToolsCache.entities, 0)
-	SetGithubToolsCacheError(entity, runnerErrors.ErrNotFound)
-
-	storedEntity, err := GetGithubToolsCache(entity.ID)
-	c.Require().Error(err)
-	c.Require().Nil(storedEntity)
-}
-
-func (c *CacheTestSuite) TestTimedOutToolsCache() {
-	tools := []commonParams.RunnerApplicationDownload{
-		{
-			DownloadURL: garmTesting.Ptr("https://example.com"),
-		},
-	}
-
-	c.Require().NotNil(githubToolsCache)
-	c.Require().Len(githubToolsCache.entities, 0)
-	SetGithubToolsCache(c.entity, tools)
-	entity := githubToolsCache.entities[c.entity.ID]
-
-	c.Require().Equal(int64(entity.expiresAt.Sub(entity.updatedAt).Minutes()), int64(60))
-	c.Require().Len(githubToolsCache.entities, 1)
-	entity = githubToolsCache.entities[c.entity.ID]
-	entity.updatedAt = entity.updatedAt.Add(-3 * time.Hour)
-	entity.expiresAt = entity.updatedAt.Add(-2 * time.Hour)
-	githubToolsCache.entities[c.entity.ID] = entity
-
-	cachedTools, err := GetGithubToolsCache(c.entity.ID)
-	c.Require().Error(err)
-	c.Require().Nil(cachedTools)
-}
-
-func (c *CacheTestSuite) TestGetInexistentCache() {
-	c.Require().NotNil(githubToolsCache)
-	c.Require().Len(githubToolsCache.entities, 0)
-	cachedTools, err := GetGithubToolsCache(c.entity.ID)
-	c.Require().Error(err)
-	c.Require().Nil(cachedTools)
-}
-
-func (c *CacheTestSuite) TestSetGithubCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-	}
-	SetGithubCredentials(credentials)
-	cachedCreds, ok := GetGithubCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-}
-
-func (c *CacheTestSuite) TestGetGithubCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-	}
-	SetGithubCredentials(credentials)
-	cachedCreds, ok := GetGithubCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-
-	nonExisting, ok := GetGithubCredentials(2)
-	c.Require().False(ok)
-	c.Require().Equal(params.ForgeCredentials{}, nonExisting)
-}
-
-func (c *CacheTestSuite) TestDeleteGithubCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-	}
-	SetGithubCredentials(credentials)
-	cachedCreds, ok := GetGithubCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-
-	DeleteGithubCredentials(1)
-	cachedCreds, ok = GetGithubCredentials(1)
-	c.Require().False(ok)
-	c.Require().Equal(params.ForgeCredentials{}, cachedCreds)
-}
-
-func (c *CacheTestSuite) TestGetAllGithubCredentials() {
-	credentials1 := params.ForgeCredentials{
-		ID: 1,
-	}
-	credentials2 := params.ForgeCredentials{
-		ID: 2,
-	}
-	SetGithubCredentials(credentials1)
-	SetGithubCredentials(credentials2)
-
-	cachedCreds := GetAllGithubCredentials()
-	c.Require().Len(cachedCreds, 2)
-	c.Require().Contains(cachedCreds, credentials1)
-	c.Require().Contains(cachedCreds, credentials2)
-}
-
-func (c *CacheTestSuite) TestSetInstanceCache() {
-	instance := params.Instance{
-		Name: "test-instance",
-	}
-	SetInstanceCache(instance)
-	cachedInstance, ok := GetInstanceCache("test-instance")
-	c.Require().True(ok)
-	c.Require().Equal(instance.Name, cachedInstance.Name)
-}
-
-func (c *CacheTestSuite) TestGetInstanceCache() {
-	instance := params.Instance{
-		Name: "test-instance",
-	}
-	SetInstanceCache(instance)
-	cachedInstance, ok := GetInstanceCache("test-instance")
-	c.Require().True(ok)
-	c.Require().Equal(instance.Name, cachedInstance.Name)
-
-	nonExisting, ok := GetInstanceCache("non-existing")
-	c.Require().False(ok)
-	c.Require().Equal(params.Instance{}, nonExisting)
-}
-
-func (c *CacheTestSuite) TestDeleteInstanceCache() {
-	instance := params.Instance{
-		Name: "test-instance",
-	}
-	SetInstanceCache(instance)
-	cachedInstance, ok := GetInstanceCache("test-instance")
-	c.Require().True(ok)
-	c.Require().Equal(instance.Name, cachedInstance.Name)
-
-	DeleteInstanceCache("test-instance")
-	cachedInstance, ok = GetInstanceCache("test-instance")
-	c.Require().False(ok)
-	c.Require().Equal(params.Instance{}, cachedInstance)
-}
-
-func (c *CacheTestSuite) TestGetAllInstances() {
-	instance1 := params.Instance{
-		Name: "test-instance-1",
-	}
-	instance2 := params.Instance{
-		Name: "test-instance-2",
-	}
-	SetInstanceCache(instance1)
-	SetInstanceCache(instance2)
-
-	cachedInstances := GetAllInstancesCache()
-	c.Require().Len(cachedInstances, 2)
-	c.Require().Contains(cachedInstances, instance1)
-	c.Require().Contains(cachedInstances, instance2)
-}
-
-func (c *CacheTestSuite) TestGetInstancesForPool() {
-	instance1 := params.Instance{
-		Name: "test-instance-1",
-		PoolID: "pool-1",
-	}
-	instance2 := params.Instance{
-		Name: "test-instance-2",
-		PoolID: "pool-1",
-	}
-	instance3 := params.Instance{
-		Name: "test-instance-3",
-		PoolID: "pool-2",
-	}
-	SetInstanceCache(instance1)
-	SetInstanceCache(instance2)
-	SetInstanceCache(instance3)
-
-	cachedInstances := GetInstancesForPool("pool-1")
-	c.Require().Len(cachedInstances, 2)
-	c.Require().Contains(cachedInstances, instance1)
-	c.Require().Contains(cachedInstances, instance2)
-
-	cachedInstances = GetInstancesForPool("pool-2")
-	c.Require().Len(cachedInstances, 1)
-	c.Require().Contains(cachedInstances, instance3)
-}
-
-func (c *CacheTestSuite) TestGetInstancesForScaleSet() {
-	instance1 := params.Instance{
-		Name: "test-instance-1",
-		ScaleSetID: 1,
-	}
-	instance2 := params.Instance{
-		Name: "test-instance-2",
-		ScaleSetID: 1,
-	}
-	instance3 := params.Instance{
-		Name: "test-instance-3",
-		ScaleSetID: 2,
-	}
-	SetInstanceCache(instance1)
-	SetInstanceCache(instance2)
-	SetInstanceCache(instance3)
-
-	cachedInstances := GetInstancesForScaleSet(1)
-	c.Require().Len(cachedInstances, 2)
-	c.Require().Contains(cachedInstances, instance1)
-	c.Require().Contains(cachedInstances, instance2)
-
-	cachedInstances = GetInstancesForScaleSet(2)
-	c.Require().Len(cachedInstances, 1)
-	c.Require().Contains(cachedInstances, instance3)
-}
-
-func (c *CacheTestSuite) TestSetGetEntityCache() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	SetEntity(entity)
-	cachedEntity, ok := GetEntity("test-entity")
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-
-	pool := params.Pool{
-		ID: "pool-1",
-	}
-	SetEntityPool(entity.ID, pool)
-	cachedEntityPools := GetEntityPools("test-entity")
-	c.Require().Equal(1, len(cachedEntityPools))
-
-	entity.Credentials.Description = "test description"
-	SetEntity(entity)
-	cachedEntity, ok = GetEntity("test-entity")
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	c.Require().Equal(entity.Credentials.Description, cachedEntity.Credentials.Description)
-
-	// Make sure we don't clobber pools after updating the entity
-	cachedEntityPools = GetEntityPools("test-entity")
-	c.Require().Equal(1, len(cachedEntityPools))
-}
-
-func (c *CacheTestSuite) TestReplaceEntityPools() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: params.ForgeCredentials{
-			ID: 1,
-			ForgeType: params.GithubEndpointType,
-		},
-	}
-	pool1 := params.Pool{
-		ID: "pool-1",
-	}
-	pool2 := params.Pool{
-		ID: "pool-2",
-	}
-
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Name: "test",
-		ForgeType: params.GithubEndpointType,
-	}
-	SetGithubCredentials(credentials)
-
-	SetEntity(entity)
-	ReplaceEntityPools(entity.ID, []params.Pool{pool1, pool2})
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	c.Require().Equal("test", cachedEntity.Credentials.Name)
-
-	pools := GetEntityPools(entity.ID)
-	c.Require().Len(pools, 2)
-	c.Require().Contains(pools, pool1)
-	c.Require().Contains(pools, pool2)
-}
-
-func (c *CacheTestSuite) TestReplaceEntityScaleSets() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	scaleSet1 := params.ScaleSet{
-		ID: 1,
-	}
-	scaleSet2 := params.ScaleSet{
-		ID: 2,
-	}
-
-	SetEntity(entity)
-	ReplaceEntityScaleSets(entity.ID, []params.ScaleSet{scaleSet1, scaleSet2})
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-
-	scaleSets := GetEntityScaleSets(entity.ID)
-	c.Require().Len(scaleSets, 2)
-	c.Require().Contains(scaleSets, scaleSet1)
-	c.Require().Contains(scaleSets, scaleSet2)
-}
-
-func (c *CacheTestSuite) TestDeleteEntity() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	SetEntity(entity)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-
-	DeleteEntity(entity.ID)
-	cachedEntity, ok = GetEntity(entity.ID)
-	c.Require().False(ok)
-	c.Require().Equal(params.ForgeEntity{}, cachedEntity)
-}
-
-func (c *CacheTestSuite) TestSetEntityPool() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	pool := params.Pool{
-		ID: "pool-1",
-	}
-
-	SetEntity(entity)
-
-	SetEntityPool(entity.ID, pool)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	pools := GetEntityPools(entity.ID)
-	c.Require().Len(pools, 1)
-	c.Require().Contains(pools, pool)
-	c.Require().False(pools[0].Enabled)
-
-	pool.Enabled = true
-	SetEntityPool(entity.ID, pool)
-	cachedEntity, ok = GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	pools = GetEntityPools(entity.ID)
-	c.Require().Len(pools, 1)
-	c.Require().Contains(pools, pool)
-	c.Require().True(pools[0].Enabled)
-}
-
-func (c *CacheTestSuite) TestSetEntityScaleSet() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	scaleSet := params.ScaleSet{
-		ID: 1,
-	}
-
-	SetEntity(entity)
-	SetEntityScaleSet(entity.ID, scaleSet)
-
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	scaleSets := GetEntityScaleSets(entity.ID)
-	c.Require().Len(scaleSets, 1)
-	c.Require().Contains(scaleSets, scaleSet)
-	c.Require().False(scaleSets[0].Enabled)
-
-	scaleSet.Enabled = true
-	SetEntityScaleSet(entity.ID, scaleSet)
-	scaleSets = GetEntityScaleSets(entity.ID)
-	c.Require().Len(scaleSets, 1)
-	c.Require().Contains(scaleSets, scaleSet)
-	c.Require().True(scaleSets[0].Enabled)
-}
-
-func (c *CacheTestSuite) TestDeleteEntityPool() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	pool := params.Pool{
-		ID: "pool-1",
-	}
-
-	SetEntity(entity)
-	SetEntityPool(entity.ID, pool)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-
-	DeleteEntityPool(entity.ID, pool.ID)
-	pools := GetEntityPools(entity.ID)
-	c.Require().Len(pools, 0)
-	c.Require().NotContains(pools, pool)
-}
-
-func (c *CacheTestSuite) TestDeleteEntityScaleSet() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	scaleSet := params.ScaleSet{
-		ID: 1,
-	}
-
-	SetEntity(entity)
-	SetEntityScaleSet(entity.ID, scaleSet)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-
-	DeleteEntityScaleSet(entity.ID, scaleSet.ID)
-	scaleSets := GetEntityScaleSets(entity.ID)
-	c.Require().Len(scaleSets, 0)
-	c.Require().NotContains(scaleSets, scaleSet)
-}
-
-func (c *CacheTestSuite) TestFindPoolsMatchingAllTags() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	pool1 := params.Pool{
-		ID: "pool-1",
-		Tags: []params.Tag{
-			{
-				Name: "tag1",
-			},
-			{
-				Name: "tag2",
-			},
-		},
-	}
-	pool2 := params.Pool{
-		ID: "pool-2",
-		Tags: []params.Tag{
-			{
-				Name: "tag1",
-			},
-		},
-	}
-	pool3 := params.Pool{
-		ID: "pool-3",
-		Tags: []params.Tag{
-			{
-				Name: "tag3",
-			},
-		},
-	}
-
-	SetEntity(entity)
-	SetEntityPool(entity.ID, pool1)
-	SetEntityPool(entity.ID, pool2)
-	SetEntityPool(entity.ID, pool3)
-
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	pools := FindPoolsMatchingAllTags(entity.ID, []string{"tag1", "tag2"})
-	c.Require().Len(pools, 1)
-	c.Require().Contains(pools, pool1)
-	pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag1"})
-	c.Require().Len(pools, 2)
-	c.Require().Contains(pools, pool1)
-	c.Require().Contains(pools, pool2)
-	pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag3"})
-	c.Require().Len(pools, 1)
-	c.Require().Contains(pools, pool3)
-	pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag4"})
-	c.Require().Len(pools, 0)
-}
-
-func (c *CacheTestSuite) TestGetEntityPools() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	pool1 := params.Pool{
-		ID: "pool-1",
-		Tags: []params.Tag{
-			{
-				Name: "tag1",
-			},
-			{
-				Name: "tag2",
-			},
-		},
-	}
-	pool2 := params.Pool{
-		ID: "pool-2",
-		Tags: []params.Tag{
-			{
-				Name: "tag1",
-			},
-			{
-				Name: "tag3",
-			},
-		},
-	}
-
-	SetEntity(entity)
-	SetEntityPool(entity.ID, pool1)
-	SetEntityPool(entity.ID, pool2)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	pools := GetEntityPools(entity.ID)
-	c.Require().Len(pools, 2)
-	c.Require().Contains(pools, pool1)
-	c.Require().Contains(pools, pool2)
-}
-
-func (c *CacheTestSuite) TestGetEntityScaleSet() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	scaleSet := params.ScaleSet{
-		ID: 1,
-	}
-
-	SetEntity(entity)
-	SetEntityScaleSet(entity.ID, scaleSet)
-
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	scaleSets, ok := GetEntityScaleSet(entity.ID, scaleSet.ID)
-	c.Require().True(ok)
-	c.Require().Equal(scaleSet.ID, scaleSets.ID)
-}
-
-func (c *CacheTestSuite) TestGetEntityPool() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-
-	pool := params.Pool{
-		ID: "pool-1",
-		Tags: []params.Tag{
-			{
-				Name: "tag1",
-			},
-			{
-				Name: "tag2",
-			},
-		},
-	}
-
-	SetEntity(entity)
-	SetEntityPool(entity.ID, pool)
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	poolFromCache, ok := GetEntityPool(entity.ID, pool.ID)
-	c.Require().True(ok)
-	c.Require().Equal(pool.ID, poolFromCache.ID)
-}
-
-func (c *CacheTestSuite) TestSetGiteaCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Description: "test description",
-	}
-	SetGiteaCredentials(credentials)
-	cachedCreds, ok := GetGiteaCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-
-	cachedCreds.Description = "new description"
-	SetGiteaCredentials(cachedCreds)
-	cachedCreds, ok = GetGiteaCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-	c.Require().Equal("new description", cachedCreds.Description)
-}
-
-func (c *CacheTestSuite) TestGetAllGiteaCredentials() {
-	credentials1 := params.ForgeCredentials{
-		ID: 1,
-	}
-	credentials2 := params.ForgeCredentials{
-		ID: 2,
-	}
-	SetGiteaCredentials(credentials1)
-	SetGiteaCredentials(credentials2)
-
-	cachedCreds := GetAllGiteaCredentials()
-	c.Require().Len(cachedCreds, 2)
-	c.Require().Contains(cachedCreds, credentials1)
-	c.Require().Contains(cachedCreds, credentials2)
-}
-
-func (c *CacheTestSuite) TestDeleteGiteaCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-	}
-	SetGiteaCredentials(credentials)
-	cachedCreds, ok := GetGiteaCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-
-	DeleteGiteaCredentials(1)
-	cachedCreds, ok = GetGiteaCredentials(1)
-	c.Require().False(ok)
-	c.Require().Equal(params.ForgeCredentials{}, cachedCreds)
-}
-
-func (c *CacheTestSuite) TestDeleteGiteaCredentialsNotFound() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-	}
-	SetGiteaCredentials(credentials)
-	cachedCreds, ok := GetGiteaCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-
-	DeleteGiteaCredentials(2)
-	cachedCreds, ok = GetGiteaCredentials(1)
-	c.Require().True(ok)
-	c.Require().Equal(credentials.ID, cachedCreds.ID)
-}
-
-func (c *CacheTestSuite) TestUpdateCredentialsInAffectedEntities() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Description: "test description",
-	}
-	entity1 := params.ForgeEntity{
-		ID: "test-entity-1",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-	}
-
-	entity2 := params.ForgeEntity{
-		ID: "test-entity-2",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-	}
-
-	SetEntity(entity1)
-	SetEntity(entity2)
-
-	cachedEntity1, ok := GetEntity(entity1.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity1.ID, cachedEntity1.ID)
-	cachedEntity2, ok := GetEntity(entity2.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity2.ID, cachedEntity2.ID)
-
-	c.Require().Equal(credentials.ID, cachedEntity1.Credentials.ID)
-	c.Require().Equal(credentials.ID, cachedEntity2.Credentials.ID)
-	c.Require().Equal(credentials.Description, cachedEntity1.Credentials.Description)
-	c.Require().Equal(credentials.Description, cachedEntity2.Credentials.Description)
-
-	credentials.Description = "new description"
-	SetGiteaCredentials(credentials)
-
-	cachedEntity1, ok = GetEntity(entity1.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity1.ID, cachedEntity1.ID)
-	cachedEntity2, ok = GetEntity(entity2.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity2.ID, cachedEntity2.ID)
-
-	c.Require().Equal(credentials.ID, cachedEntity1.Credentials.ID)
-	c.Require().Equal(credentials.ID, cachedEntity2.Credentials.ID)
-	c.Require().Equal(credentials.Description, cachedEntity1.Credentials.Description)
-	c.Require().Equal(credentials.Description, cachedEntity2.Credentials.Description)
-}
-
-func (c *CacheTestSuite) TestSetGiteaEntity() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Description: "test description",
-		ForgeType: params.GiteaEndpointType,
-	}
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-	}
-
-	SetGiteaCredentials(credentials)
-	SetEntity(entity)
-
-	cachedEntity, ok := GetEntity(entity.ID)
-	c.Require().True(ok)
-	c.Require().Equal(entity.ID, cachedEntity.ID)
-	c.Require().Equal(credentials.ID, cachedEntity.Credentials.ID)
-	c.Require().Equal(credentials.Description, cachedEntity.Credentials.Description)
-	c.Require().Equal(credentials.ForgeType, cachedEntity.Credentials.ForgeType)
-}
-
-func (c *CacheTestSuite) TestGetEntitiesUsingCredentials() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Description: "test description",
-		Name: "test",
-		ForgeType: params.GithubEndpointType,
-	}
-
-	credentials2 := params.ForgeCredentials{
-		ID: 2,
-		Description: "test description2",
-		Name: "test",
-		ForgeType: params.GiteaEndpointType,
-	}
-
-	entity1 := params.ForgeEntity{
-		ID: "test-entity-1",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-	}
-
-	entity2 := params.ForgeEntity{
-		ID: "test-entity-2",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-	}
-	entity3 := params.ForgeEntity{
-		ID: "test-entity-3",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials2,
-	}
-
-	SetEntity(entity1)
-	SetEntity(entity2)
-	SetEntity(entity3)
-
-	cachedEntities := GetEntitiesUsingCredentials(credentials)
-	c.Require().Len(cachedEntities, 2)
-	c.Require().Contains(cachedEntities, entity1)
-	c.Require().Contains(cachedEntities, entity2)
-
-	cachedEntities = GetEntitiesUsingCredentials(credentials2)
-	c.Require().Len(cachedEntities, 1)
-	c.Require().Contains(cachedEntities, entity3)
-}
-
-func (c *CacheTestSuite) TestGetallEntities() {
-	credentials := params.ForgeCredentials{
-		ID: 1,
-		Description: "test description",
-		Name: "test",
-		ForgeType: params.GithubEndpointType,
-	}
-
-	credentials2 := params.ForgeCredentials{
-		ID: 2,
-		Description: "test description2",
-		Name: "test",
-		ForgeType: params.GiteaEndpointType,
-	}
-
-	entity1 := params.ForgeEntity{
-		ID: "test-entity-1",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-		CreatedAt: time.Now(),
-	}
-
-	entity2 := params.ForgeEntity{
-		ID: "test-entity-2",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials,
-		CreatedAt: time.Now().Add(1 * time.Second),
-	}
-
-	entity3 := params.ForgeEntity{
-		ID: "test-entity-3",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-		Credentials: credentials2,
-		CreatedAt: time.Now().Add(2 * time.Second),
-	}
-
-	SetEntity(entity1)
-	SetEntity(entity2)
-	SetEntity(entity3)
-
-	// Sorted by creation date
-	cachedEntities := GetAllEntities()
-	c.Require().Len(cachedEntities, 3)
-	c.Require().Equal(cachedEntities[0], entity1)
-	c.Require().Equal(cachedEntities[1], entity2)
-	c.Require().Equal(cachedEntities[2], entity3)
-}
-
-func (c *CacheTestSuite) TestGetAllPools() {
-	entity := params.ForgeEntity{
-		ID: "test-entity",
-		EntityType: params.ForgeEntityTypeOrganization,
-		Name: "test",
-		Owner: "test",
-	}
-	pool1 := params.Pool{
-		ID: "pool-1",
"pool-1", - CreatedAt: time.Now(), - Tags: []params.Tag{ - { - Name: "tag1", - }, - { - Name: "tag2", - }, - }, - } - - pool2 := params.Pool{ - ID: "pool-2", - CreatedAt: time.Now().Add(1 * time.Second), - Tags: []params.Tag{ - { - Name: "tag1", - }, - { - Name: "tag3", - }, - }, - } - - SetEntity(entity) - SetEntityPool(entity.ID, pool1) - SetEntityPool(entity.ID, pool2) - cachedEntity, ok := GetEntity(entity.ID) - c.Require().True(ok) - c.Require().Equal(entity.ID, cachedEntity.ID) - pools := GetAllPools() - c.Require().Len(pools, 2) - c.Require().Equal(pools[0].ID, pool1.ID) - c.Require().Equal(pools[1].ID, pool2.ID) -} - -func (c *CacheTestSuite) TestGetAllScaleSets() { - entity := params.ForgeEntity{ - ID: "test-entity", - EntityType: params.ForgeEntityTypeOrganization, - Name: "test", - Owner: "test", - } - scaleSet1 := params.ScaleSet{ - ID: 1, - } - scaleSet2 := params.ScaleSet{ - ID: 2, - } - - SetEntity(entity) - SetEntityScaleSet(entity.ID, scaleSet1) - SetEntityScaleSet(entity.ID, scaleSet2) - cachedEntity, ok := GetEntity(entity.ID) - c.Require().True(ok) - c.Require().Equal(entity.ID, cachedEntity.ID) - scaleSets := GetAllScaleSets() - c.Require().Len(scaleSets, 2) - c.Require().Equal(scaleSets[0].ID, scaleSet1.ID) - c.Require().Equal(scaleSets[1].ID, scaleSet2.ID) -} - -func (c *CacheTestSuite) TestGetAllGetAllGithubCredentialsAsMap() { - credentials1 := params.ForgeCredentials{ - ID: 1, - } - credentials2 := params.ForgeCredentials{ - ID: 2, - } - SetGithubCredentials(credentials1) - SetGithubCredentials(credentials2) - - cachedCreds := GetAllGithubCredentialsAsMap() - c.Require().Len(cachedCreds, 2) - c.Require().Contains(cachedCreds, credentials1.ID) - c.Require().Contains(cachedCreds, credentials2.ID) -} - -func (c *CacheTestSuite) TestGetAllGiteaCredentialsAsMap() { - credentials1 := params.ForgeCredentials{ - ID: 1, - CreatedAt: time.Now(), - } - credentials2 := params.ForgeCredentials{ - ID: 2, - CreatedAt: time.Now().Add(1 * time.Second), - } - SetGiteaCredentials(credentials1) - SetGiteaCredentials(credentials2) - - cachedCreds := GetAllGiteaCredentialsAsMap() - c.Require().Len(cachedCreds, 2) - c.Require().Contains(cachedCreds, credentials1.ID) - c.Require().Contains(cachedCreds, credentials2.ID) -} - -func TestCacheTestSuite(t *testing.T) { - t.Parallel() - suite.Run(t, new(CacheTestSuite)) -} diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go deleted file mode 100644 index 3cb5c71d..00000000 --- a/cache/credentials_cache.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
-package cache
-
-import (
-	"sync"
-
-	"github.com/cloudbase/garm/params"
-)
-
-var (
-	credentialsCache *CredentialCache
-	giteaCredentialsCache *CredentialCache
-)
-
-func init() {
-	ghCredentialsCache := &CredentialCache{
-		cache: make(map[uint]params.ForgeCredentials),
-	}
-	gtCredentialsCache := &CredentialCache{
-		cache: make(map[uint]params.ForgeCredentials),
-	}
-
-	credentialsCache = ghCredentialsCache
-	giteaCredentialsCache = gtCredentialsCache
-}
-
-type CredentialCache struct {
-	mux sync.Mutex
-
-	cache map[uint]params.ForgeCredentials
-}
-
-func (g *CredentialCache) SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	if creds, ok := g.cache[credsID]; ok {
-		creds.RateLimit = &rateLimit
-		g.cache[credsID] = creds
-	}
-}
-
-func (g *CredentialCache) SetCredentials(credentials params.ForgeCredentials) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	g.cache[credentials.ID] = credentials
-	UpdateCredentialsInAffectedEntities(credentials)
-}
-
-func (g *CredentialCache) GetCredentials(id uint) (params.ForgeCredentials, bool) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	if creds, ok := g.cache[id]; ok {
-		return creds, true
-	}
-	return params.ForgeCredentials{}, false
-}
-
-func (g *CredentialCache) DeleteCredentials(id uint) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	delete(g.cache, id)
-}
-
-func (g *CredentialCache) GetAllCredentials() []params.ForgeCredentials {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	creds := make([]params.ForgeCredentials, 0, len(g.cache))
-	for _, cred := range g.cache {
-		creds = append(creds, cred)
-	}
-
-	// Sort the credentials by ID
-	sortByID(creds)
-	return creds
-}
-
-func (g *CredentialCache) GetAllCredentialsAsMap() map[uint]params.ForgeCredentials {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	creds := make(map[uint]params.ForgeCredentials, len(g.cache))
-	for id, cred := range g.cache {
-		creds[id] = cred
-	}
-
-	return creds
-}
-
-func SetGithubCredentials(credentials params.ForgeCredentials) {
-	credentialsCache.SetCredentials(credentials)
-}
-
-func GetGithubCredentials(id uint) (params.ForgeCredentials, bool) {
-	return credentialsCache.GetCredentials(id)
-}
-
-func DeleteGithubCredentials(id uint) {
-	credentialsCache.DeleteCredentials(id)
-}
-
-func GetAllGithubCredentials() []params.ForgeCredentials {
-	return credentialsCache.GetAllCredentials()
-}
-
-func SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) {
-	credentialsCache.SetCredentialsRateLimit(credsID, rateLimit)
-}
-
-func GetAllGithubCredentialsAsMap() map[uint]params.ForgeCredentials {
-	return credentialsCache.GetAllCredentialsAsMap()
-}
-
-func SetGiteaCredentials(credentials params.ForgeCredentials) {
-	giteaCredentialsCache.SetCredentials(credentials)
-}
-
-func GetGiteaCredentials(id uint) (params.ForgeCredentials, bool) {
-	return giteaCredentialsCache.GetCredentials(id)
-}
-
-func DeleteGiteaCredentials(id uint) {
-	giteaCredentialsCache.DeleteCredentials(id)
-}
-
-func GetAllGiteaCredentials() []params.ForgeCredentials {
-	return giteaCredentialsCache.GetAllCredentials()
-}
-
-func GetAllGiteaCredentialsAsMap() map[uint]params.ForgeCredentials {
-	return giteaCredentialsCache.GetAllCredentialsAsMap()
-}
diff --git a/cache/entity_cache.go b/cache/entity_cache.go
deleted file mode 100644
index c676332f..00000000
--- a/cache/entity_cache.go
+++ /dev/null
@@ -1,435 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cache
-
-import (
-	"sync"
-	"time"
-
-	"github.com/cloudbase/garm/params"
-)
-
-var entityCache *EntityCache
-
-func init() {
-	ghEntityCache := &EntityCache{
-		entities: make(map[string]EntityItem),
-	}
-	entityCache = ghEntityCache
-}
-
-type RunnerGroupEntry struct {
-	RunnerGroupID int64
-	time time.Time
-}
-
-type EntityItem struct {
-	Entity params.ForgeEntity
-	Pools map[string]params.Pool
-	ScaleSets map[uint]params.ScaleSet
-	RunnerGroups map[string]RunnerGroupEntry
-}
-
-type EntityCache struct {
-	mux sync.Mutex
-	// entity IDs are UUID4s. It is highly unlikely they will collide (🤞).
-	entities map[string]EntityItem
-}
-
-func (e *EntityCache) UpdateCredentialsInAffectedEntities(creds params.ForgeCredentials) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	for entityID, cache := range e.entities {
-		if cache.Entity.Credentials.GetID() == creds.GetID() {
-			cache.Entity.Credentials = creds
-			e.entities[entityID] = cache
-		}
-	}
-}
-
-func (e *EntityCache) GetEntity(entityID string) (params.ForgeEntity, bool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		var creds params.ForgeCredentials
-		var ok bool
-		switch cache.Entity.Credentials.ForgeType {
-		case params.GithubEndpointType:
-			creds, ok = GetGithubCredentials(cache.Entity.Credentials.ID)
-		case params.GiteaEndpointType:
-			creds, ok = GetGiteaCredentials(cache.Entity.Credentials.ID)
-		}
-		if ok {
-			cache.Entity.Credentials = creds
-		}
-		return cache.Entity, true
-	}
-	return params.ForgeEntity{}, false
-}
-
-func (e *EntityCache) SetEntity(entity params.ForgeEntity) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	cache, ok := e.entities[entity.ID]
-	if !ok {
-		e.entities[entity.ID] = EntityItem{
-			Entity: entity,
-			Pools: make(map[string]params.Pool),
-			ScaleSets: make(map[uint]params.ScaleSet),
-			RunnerGroups: make(map[string]RunnerGroupEntry),
-		}
-		return
-	}
-	cache.Entity = entity
-	e.entities[entity.ID] = cache
-}
-
-func (e *EntityCache) ReplaceEntityPools(entityID string, pools []params.Pool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	cache, ok := e.entities[entityID]
-	if !ok {
-		return
-	}
-
-	poolsByID := map[string]params.Pool{}
-	for _, pool := range pools {
-		poolsByID[pool.ID] = pool
-	}
-	cache.Pools = poolsByID
-	e.entities[entityID] = cache
-}
-
-func (e *EntityCache) ReplaceEntityScaleSets(entityID string, scaleSets []params.ScaleSet) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	cache, ok := e.entities[entityID]
-	if !ok {
-		return
-	}
-
-	scaleSetsByID := map[uint]params.ScaleSet{}
-	for _, scaleSet := range scaleSets {
-		scaleSetsByID[scaleSet.ID] = scaleSet
-	}
-	cache.ScaleSets = scaleSetsByID
-	e.entities[entityID] = cache
-}
-
-func (e *EntityCache) DeleteEntity(entityID string) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-	delete(e.entities, entityID)
-}
-
-func (e *EntityCache) SetEntityPool(entityID string, pool params.Pool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		cache.Pools[pool.ID] = pool
-		e.entities[entityID] = cache
-	}
-}
-
-func (e *EntityCache) SetEntityScaleSet(entityID string, scaleSet params.ScaleSet) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		cache.ScaleSets[scaleSet.ID] = scaleSet
-		e.entities[entityID] = cache
-	}
-}
-
-func (e *EntityCache) DeleteEntityPool(entityID string, poolID string) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		delete(cache.Pools, poolID)
-		e.entities[entityID] = cache
-	}
-}
-
-func (e *EntityCache) DeleteEntityScaleSet(entityID string, scaleSetID uint) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		delete(cache.ScaleSets, scaleSetID)
-		e.entities[entityID] = cache
-	}
-}
-
-func (e *EntityCache) GetEntityPool(entityID string, poolID string) (params.Pool, bool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		if pool, ok := cache.Pools[poolID]; ok {
-			return pool, true
-		}
-	}
-	return params.Pool{}, false
-}
-
-func (e *EntityCache) GetEntityScaleSet(entityID string, scaleSetID uint) (params.ScaleSet, bool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		if scaleSet, ok := cache.ScaleSets[scaleSetID]; ok {
-			return scaleSet, true
-		}
-	}
-	return params.ScaleSet{}, false
-}
-
-func (e *EntityCache) FindPoolsMatchingAllTags(entityID string, tags []string) []params.Pool {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		var pools []params.Pool
-		for _, pool := range cache.Pools {
-			if pool.HasRequiredLabels(tags) {
-				pools = append(pools, pool)
-			}
-		}
-		// Sort the pools by creation date.
-		sortByCreationDate(pools)
-		return pools
-	}
-	return nil
-}
-
-func (e *EntityCache) GetEntityPools(entityID string) []params.Pool {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		var pools []params.Pool
-		for _, pool := range cache.Pools {
-			pools = append(pools, pool)
-		}
-		// Sort the pools by creation date.
-		sortByCreationDate(pools)
-		return pools
-	}
-	return nil
-}
-
-func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if cache, ok := e.entities[entityID]; ok {
-		var scaleSets []params.ScaleSet
-		for _, scaleSet := range cache.ScaleSets {
-			scaleSets = append(scaleSets, scaleSet)
-		}
-		// Sort the scale sets by creation date.
-		sortByID(scaleSets)
-		return scaleSets
-	}
-	return nil
-}
-
-func (e *EntityCache) GetEntitiesUsingCredentials(creds params.ForgeCredentials) []params.ForgeEntity {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	var entities []params.ForgeEntity
-	for _, cache := range e.entities {
-		if cache.Entity.Credentials.ForgeType != creds.ForgeType {
-			continue
-		}
-
-		if cache.Entity.Credentials.GetID() == creds.GetID() {
-			entities = append(entities, cache.Entity)
-		}
-	}
-	sortByCreationDate(entities)
-	return entities
-}
-
-func (e *EntityCache) GetAllEntities() []params.ForgeEntity {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	var entities []params.ForgeEntity
-	for _, cache := range e.entities {
-		// Get the credentials from the credentials cache.
-		var creds params.ForgeCredentials
-		var ok bool
-		switch cache.Entity.Credentials.ForgeType {
-		case params.GithubEndpointType:
-			creds, ok = GetGithubCredentials(cache.Entity.Credentials.ID)
-		case params.GiteaEndpointType:
-			creds, ok = GetGiteaCredentials(cache.Entity.Credentials.ID)
-		}
-		if ok {
-			cache.Entity.Credentials = creds
-		}
-		entities = append(entities, cache.Entity)
-	}
-	sortByCreationDate(entities)
-	return entities
-}
-
-func (e *EntityCache) GetAllPools() []params.Pool {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	var pools []params.Pool
-	for _, cache := range e.entities {
-		for _, pool := range cache.Pools {
-			pools = append(pools, pool)
-		}
-	}
-	sortByCreationDate(pools)
-	return pools
-}
-
-func (e *EntityCache) GetAllScaleSets() []params.ScaleSet {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	var scaleSets []params.ScaleSet
-	for _, cache := range e.entities {
-		for _, scaleSet := range cache.ScaleSets {
-			scaleSets = append(scaleSets, scaleSet)
-		}
-	}
-	sortByID(scaleSets)
-	return scaleSets
-}
-
-func (e *EntityCache) SetEntityRunnerGroup(entityID, runnerGroupName string, runnerGroupID int64) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if _, ok := e.entities[entityID]; ok {
-		e.entities[entityID].RunnerGroups[runnerGroupName] = RunnerGroupEntry{
-			RunnerGroupID: runnerGroupID,
-			time: time.Now().UTC(),
-		}
-	}
-}
-
-func (e *EntityCache) GetEntityRunnerGroup(entityID, runnerGroupName string) (int64, bool) {
-	e.mux.Lock()
-	defer e.mux.Unlock()
-
-	if _, ok := e.entities[entityID]; ok {
-		if runnerGroup, ok := e.entities[entityID].RunnerGroups[runnerGroupName]; ok {
-			if time.Now().UTC().After(runnerGroup.time.Add(1 * time.Hour)) {
-				delete(e.entities[entityID].RunnerGroups, runnerGroupName)
-				return 0, false
-			}
-			return runnerGroup.RunnerGroupID, true
-		}
-	}
-	return 0, false
-}
-
-func SetEntityRunnerGroup(entityID, runnerGroupName string, runnerGroupID int64) {
-	entityCache.SetEntityRunnerGroup(entityID, runnerGroupName, runnerGroupID)
-}
-
-func GetEntityRunnerGroup(entityID, runnerGroupName string) (int64, bool) {
-	return entityCache.GetEntityRunnerGroup(entityID, runnerGroupName)
-}
-
-func GetEntity(entityID string) (params.ForgeEntity, bool) {
-	return entityCache.GetEntity(entityID)
-}
-
-func SetEntity(entity params.ForgeEntity) {
-	entityCache.SetEntity(entity)
-}
-
-func ReplaceEntityPools(entityID string, pools []params.Pool) {
-	entityCache.ReplaceEntityPools(entityID, pools)
-}
-
-func ReplaceEntityScaleSets(entityID string, scaleSets []params.ScaleSet) {
-	entityCache.ReplaceEntityScaleSets(entityID, scaleSets)
-}
-
-func DeleteEntity(entityID string) {
-	entityCache.DeleteEntity(entityID)
-}
-
-func SetEntityPool(entityID string, pool params.Pool) {
-	entityCache.SetEntityPool(entityID, pool)
-}
-
-func SetEntityScaleSet(entityID string, scaleSet params.ScaleSet) {
-	entityCache.SetEntityScaleSet(entityID, scaleSet)
-}
-
-func DeleteEntityPool(entityID string, poolID string) {
-	entityCache.DeleteEntityPool(entityID, poolID)
-}
-
-func DeleteEntityScaleSet(entityID string, scaleSetID uint) {
-	entityCache.DeleteEntityScaleSet(entityID, scaleSetID)
-}
-
-func GetEntityPool(entityID string, poolID string) (params.Pool, bool) {
-	return entityCache.GetEntityPool(entityID, poolID)
-}
-
-func GetEntityScaleSet(entityID string, scaleSetID uint) (params.ScaleSet, bool) {
-	return entityCache.GetEntityScaleSet(entityID, scaleSetID)
-}
-
-func FindPoolsMatchingAllTags(entityID string, tags []string) []params.Pool {
-	return entityCache.FindPoolsMatchingAllTags(entityID, tags)
-}
-
-func GetEntityPools(entityID string) []params.Pool {
-	return entityCache.GetEntityPools(entityID)
-}
-
-func GetEntityScaleSets(entityID string) []params.ScaleSet {
-	return entityCache.GetEntityScaleSets(entityID)
-}
-
-func UpdateCredentialsInAffectedEntities(creds params.ForgeCredentials) {
-	entityCache.UpdateCredentialsInAffectedEntities(creds)
-}
-
-func GetEntitiesUsingCredentials(creds params.ForgeCredentials) []params.ForgeEntity {
-	return entityCache.GetEntitiesUsingCredentials(creds)
-}
-
-func GetAllEntities() []params.ForgeEntity {
-	return entityCache.GetAllEntities()
-}
-
-func GetAllPools() []params.Pool {
-	return entityCache.GetAllPools()
-}
-
-func GetAllScaleSets() []params.ScaleSet {
-	return entityCache.GetAllScaleSets()
-}
diff --git a/cache/github_client.go b/cache/github_client.go
deleted file mode 100644
index 179a9718..00000000
--- a/cache/github_client.go
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cache
-
-import (
-	"sync"
-
-	"github.com/cloudbase/garm/runner/common"
-)
-
-var ghClientCache *GithubClientCache
-
-type GithubClientCache struct {
-	mux sync.Mutex
-
-	cache map[string]common.GithubClient
-}
-
-func init() {
-	clientCache := &GithubClientCache{
-		cache: make(map[string]common.GithubClient),
-	}
-	ghClientCache = clientCache
-}
-
-func (g *GithubClientCache) SetClient(entityID string, client common.GithubClient) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	g.cache[entityID] = client
-}
-
-func (g *GithubClientCache) GetClient(entityID string) (common.GithubClient, bool) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	if client, ok := g.cache[entityID]; ok {
-		return client, true
-	}
-	return nil, false
-}
-
-func SetGithubClient(entityID string, client common.GithubClient) {
-	ghClientCache.SetClient(entityID, client)
-}
-
-func GetGithubClient(entityID string) (common.GithubClient, bool) {
-	return ghClientCache.GetClient(entityID)
-}
diff --git a/cache/instance_cache.go b/cache/instance_cache.go
deleted file mode 100644
index ae2c1cec..00000000
--- a/cache/instance_cache.go
+++ /dev/null
@@ -1,143 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cache
-
-import (
-	"sync"
-
-	"github.com/cloudbase/garm/params"
-)
-
-var instanceCache *InstanceCache
-
-func init() {
-	cache := &InstanceCache{
-		cache: make(map[string]params.Instance),
-	}
-	instanceCache = cache
-}
-
-type InstanceCache struct {
-	mux sync.Mutex
-
-	cache map[string]params.Instance
-}
-
-func (i *InstanceCache) SetInstance(instance params.Instance) {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	i.cache[instance.Name] = instance
-}
-
-func (i *InstanceCache) GetInstance(name string) (params.Instance, bool) {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	if instance, ok := i.cache[name]; ok {
-		return instance, true
-	}
-	return params.Instance{}, false
-}
-
-func (i *InstanceCache) DeleteInstance(name string) {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	delete(i.cache, name)
-}
-
-func (i *InstanceCache) GetAllInstances() []params.Instance {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	instances := make([]params.Instance, 0, len(i.cache))
-	for _, instance := range i.cache {
-		instances = append(instances, instance)
-	}
-	sortByCreationDate(instances)
-	return instances
-}
-
-func (i *InstanceCache) GetInstancesForPool(poolID string) []params.Instance {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	var filteredInstances []params.Instance
-	for _, instance := range i.cache {
-		if instance.PoolID == poolID {
-			filteredInstances = append(filteredInstances, instance)
-		}
-	}
-	sortByCreationDate(filteredInstances)
-	return filteredInstances
-}
-
-func (i *InstanceCache) GetInstancesForScaleSet(scaleSetID uint) []params.Instance {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	var filteredInstances []params.Instance
-	for _, instance := range i.cache {
-		if instance.ScaleSetID == scaleSetID {
-			filteredInstances = append(filteredInstances, instance)
-		}
-	}
-	sortByCreationDate(filteredInstances)
-	return filteredInstances
-}
-
-func (i *InstanceCache) GetEntityInstances(entityID string) []params.Instance {
-	pools := GetEntityPools(entityID)
-	poolsAsMap := map[string]bool{}
-	for _, pool := range pools {
-		poolsAsMap[pool.ID] = true
-	}
-
-	ret := []params.Instance{}
-	for _, val := range i.GetAllInstances() {
-		if _, ok := poolsAsMap[val.PoolID]; ok {
-			ret = append(ret, val)
-		}
-	}
-	return ret
-}
-
-func SetInstanceCache(instance params.Instance) {
-	instanceCache.SetInstance(instance)
-}
-
-func GetInstanceCache(name string) (params.Instance, bool) {
-	return instanceCache.GetInstance(name)
-}
-
-func DeleteInstanceCache(name string) {
-	instanceCache.DeleteInstance(name)
-}
-
-func GetAllInstancesCache() []params.Instance {
-	return instanceCache.GetAllInstances()
-}
-
-func GetInstancesForPool(poolID string) []params.Instance {
-	return instanceCache.GetInstancesForPool(poolID)
-}
-
-func GetInstancesForScaleSet(scaleSetID uint) []params.Instance {
-	return instanceCache.GetInstancesForScaleSet(scaleSetID)
-}
-
-func GetEntityInstances(entityID string) []params.Instance {
-	return instanceCache.GetEntityInstances(entityID)
-}
diff --git a/cache/tools_cache.go b/cache/tools_cache.go
deleted file mode 100644
index 30e83a0e..00000000
--- a/cache/tools_cache.go
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cache
-
-import (
-	"fmt"
-	"sync"
-	"time"
-
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	"github.com/cloudbase/garm/params"
-)
-
-var githubToolsCache *GithubToolsCache
-
-func init() {
-	ghToolsCache := &GithubToolsCache{
-		entities: make(map[string]GithubEntityTools),
-	}
-	githubToolsCache = ghToolsCache
-}
-
-type GithubEntityTools struct {
-	updatedAt time.Time
-	expiresAt time.Time
-	err error
-	entity params.ForgeEntity
-	tools []commonParams.RunnerApplicationDownload
-}
-
-type GithubToolsCache struct {
-	mux sync.Mutex
-	// entity IDs are UUID4s. It is highly unlikely they will collide (🤞).
-	entities map[string]GithubEntityTools
-}
-
-func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicationDownload, error) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	if cache, ok := g.entities[entityID]; ok {
-		if cache.entity.Credentials.ForgeType == params.GithubEndpointType {
-			if time.Now().UTC().After(cache.expiresAt.Add(-5 * time.Minute)) {
-				// Stale cache, remove it.
-				delete(g.entities, entityID)
-				return nil, fmt.Errorf("cache expired for entity %s", entityID)
-			}
-		}
-		if cache.err != nil {
-			return nil, cache.err
-		}
-		return cache.tools, nil
-	}
-	return nil, fmt.Errorf("no cache found for entity %s", entityID)
-}
-
-func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	forgeTools := GithubEntityTools{
-		updatedAt: time.Now(),
-		entity: entity,
-		tools: tools,
-		err: nil,
-	}
-
-	if entity.Credentials.ForgeType == params.GithubEndpointType {
-		forgeTools.expiresAt = time.Now().Add(1 * time.Hour)
-	}
-
-	g.entities[entity.ID] = forgeTools
-}
-
-func (g *GithubToolsCache) SetToolsError(entity params.ForgeEntity, err error) {
-	g.mux.Lock()
-	defer g.mux.Unlock()
-
-	// If the entity is not in the cache, add it with the error.
-	cache, ok := g.entities[entity.ID]
-	if !ok {
-		g.entities[entity.ID] = GithubEntityTools{
-			updatedAt: time.Now(),
-			entity: entity,
-			err: err,
-		}
-		return
-	}
-
-	// Update the error for the existing entity.
-	cache.err = err
-	g.entities[entity.ID] = cache
-}
-
-func SetGithubToolsCache(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) {
-	githubToolsCache.Set(entity, tools)
-}
-
-func GetGithubToolsCache(entityID string) ([]commonParams.RunnerApplicationDownload, error) {
-	return githubToolsCache.Get(entityID)
-}
-
-func SetGithubToolsCacheError(entity params.ForgeEntity, err error) {
-	githubToolsCache.SetToolsError(entity, err)
-}
diff --git a/cache/util.go b/cache/util.go
deleted file mode 100644
index 5fd234a9..00000000
--- a/cache/util.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cache
-
-import (
-	"sort"
-
-	"github.com/cloudbase/garm/params"
-)
-
-func sortByID[T params.IDGetter](s []T) {
-	sort.Slice(s, func(i, j int) bool {
-		return s[i].GetID() < s[j].GetID()
-	})
-}
-
-func sortByCreationDate[T params.CreationDateGetter](s []T) {
-	sort.Slice(s, func(i, j int) bool {
-		return s[i].GetCreatedAt().Before(s[j].GetCreatedAt())
-	})
-}
diff --git a/client/credentials/create_credentials_responses.go b/client/credentials/create_credentials_responses.go
index a0037edf..cc5dc5dc 100644
--- a/client/credentials/create_credentials_responses.go
+++ b/client/credentials/create_credentials_responses.go
@@ -50,10 +50,10 @@ func NewCreateCredentialsOK() *CreateCredentialsOK {
 /*
 CreateCredentialsOK describes a response with status code 200, with default header values.
 
-ForgeCredentials
+GithubCredentials
 */
 type CreateCredentialsOK struct {
-	Payload garm_params.ForgeCredentials
+	Payload garm_params.GithubCredentials
 }
 
 // IsSuccess returns true when this create credentials o k response has a 2xx status code
@@ -96,7 +96,7 @@ func (o *CreateCredentialsOK) String() string {
 	return fmt.Sprintf("[POST /github/credentials][%d] createCredentialsOK %s", 200, payload)
 }
 
-func (o *CreateCredentialsOK) GetPayload() garm_params.ForgeCredentials {
+func (o *CreateCredentialsOK) GetPayload() garm_params.GithubCredentials {
 	return o.Payload
 }
 
diff --git a/client/credentials/create_gitea_credentials_parameters.go b/client/credentials/create_gitea_credentials_parameters.go
deleted file mode 100644
index 6e255bfa..00000000
--- a/client/credentials/create_gitea_credentials_parameters.go
+++ /dev/null
@@ -1,151 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-package credentials
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
-	"context"
-	"net/http"
-	"time"
-
-	"github.com/go-openapi/errors"
-	"github.com/go-openapi/runtime"
-	cr "github.com/go-openapi/runtime/client"
-	"github.com/go-openapi/strfmt"
-
-	garm_params "github.com/cloudbase/garm/params"
-)
-
-// NewCreateGiteaCredentialsParams creates a new CreateGiteaCredentialsParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewCreateGiteaCredentialsParams() *CreateGiteaCredentialsParams {
-	return &CreateGiteaCredentialsParams{
-		timeout: cr.DefaultTimeout,
-	}
-}
-
-// NewCreateGiteaCredentialsParamsWithTimeout creates a new CreateGiteaCredentialsParams object
-// with the ability to set a timeout on a request.
-func NewCreateGiteaCredentialsParamsWithTimeout(timeout time.Duration) *CreateGiteaCredentialsParams {
-	return &CreateGiteaCredentialsParams{
-		timeout: timeout,
-	}
-}
-
-// NewCreateGiteaCredentialsParamsWithContext creates a new CreateGiteaCredentialsParams object
-// with the ability to set a context for a request.
-func NewCreateGiteaCredentialsParamsWithContext(ctx context.Context) *CreateGiteaCredentialsParams {
-	return &CreateGiteaCredentialsParams{
-		Context: ctx,
-	}
-}
-
-// NewCreateGiteaCredentialsParamsWithHTTPClient creates a new CreateGiteaCredentialsParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewCreateGiteaCredentialsParamsWithHTTPClient(client *http.Client) *CreateGiteaCredentialsParams {
-	return &CreateGiteaCredentialsParams{
-		HTTPClient: client,
-	}
-}
-
-/*
-CreateGiteaCredentialsParams contains all the parameters to send to the API endpoint
-
-	for the create gitea credentials operation.
-
-	Typically these are written to a http.Request.
-*/
-type CreateGiteaCredentialsParams struct {
-
-	/* Body.
-
-	   Parameters used when creating a Gitea credential.
-	*/
-	Body garm_params.CreateGiteaCredentialsParams
-
-	timeout time.Duration
-	Context context.Context
-	HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the create gitea credentials params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *CreateGiteaCredentialsParams) WithDefaults() *CreateGiteaCredentialsParams {
-	o.SetDefaults()
-	return o
-}
-
-// SetDefaults hydrates default values in the create gitea credentials params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *CreateGiteaCredentialsParams) SetDefaults() {
-	// no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) WithTimeout(timeout time.Duration) *CreateGiteaCredentialsParams {
-	o.SetTimeout(timeout)
-	return o
-}
-
-// SetTimeout adds the timeout to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) SetTimeout(timeout time.Duration) {
-	o.timeout = timeout
-}
-
-// WithContext adds the context to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) WithContext(ctx context.Context) *CreateGiteaCredentialsParams {
-	o.SetContext(ctx)
-	return o
-}
-
-// SetContext adds the context to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) SetContext(ctx context.Context) {
-	o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) WithHTTPClient(client *http.Client) *CreateGiteaCredentialsParams {
-	o.SetHTTPClient(client)
-	return o
-}
-
-// SetHTTPClient adds the HTTPClient to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) SetHTTPClient(client *http.Client) {
-	o.HTTPClient = client
-}
-
-// WithBody adds the body to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) WithBody(body garm_params.CreateGiteaCredentialsParams) *CreateGiteaCredentialsParams {
-	o.SetBody(body)
-	return o
-}
-
-// SetBody adds the body to the create gitea credentials params
-func (o *CreateGiteaCredentialsParams) SetBody(body garm_params.CreateGiteaCredentialsParams) {
-	o.Body = body
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *CreateGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
-	if err := r.SetTimeout(o.timeout); err != nil {
-		return err
-	}
-	var res []error
-	if err := r.SetBodyParam(o.Body); err != nil {
-		return err
-	}
-
-	if len(res) > 0 {
-		return errors.CompositeValidationError(res...)
- } - return nil -} diff --git a/client/credentials/create_gitea_credentials_responses.go b/client/credentials/create_gitea_credentials_responses.go deleted file mode 100644 index 2389cb04..00000000 --- a/client/credentials/create_gitea_credentials_responses.go +++ /dev/null @@ -1,179 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// CreateGiteaCredentialsReader is a Reader for the CreateGiteaCredentials structure. -type CreateGiteaCredentialsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *CreateGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewCreateGiteaCredentialsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - case 400: - result := NewCreateGiteaCredentialsBadRequest() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return nil, result - default: - return nil, runtime.NewAPIError("[POST /gitea/credentials] CreateGiteaCredentials", response, response.Code()) - } -} - -// NewCreateGiteaCredentialsOK creates a CreateGiteaCredentialsOK with default headers values -func NewCreateGiteaCredentialsOK() *CreateGiteaCredentialsOK { - return &CreateGiteaCredentialsOK{} -} - -/* -CreateGiteaCredentialsOK describes a response with status code 200, with default header values. 
- -ForgeCredentials -*/ -type CreateGiteaCredentialsOK struct { - Payload garm_params.ForgeCredentials -} - -// IsSuccess returns true when this create gitea credentials o k response has a 2xx status code -func (o *CreateGiteaCredentialsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this create gitea credentials o k response has a 3xx status code -func (o *CreateGiteaCredentialsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create gitea credentials o k response has a 4xx status code -func (o *CreateGiteaCredentialsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this create gitea credentials o k response has a 5xx status code -func (o *CreateGiteaCredentialsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this create gitea credentials o k response a status code equal to that given -func (o *CreateGiteaCredentialsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the create gitea credentials o k response -func (o *CreateGiteaCredentialsOK) Code() int { - return 200 -} - -func (o *CreateGiteaCredentialsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsOK %s", 200, payload) -} - -func (o *CreateGiteaCredentialsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsOK %s", 200, payload) -} - -func (o *CreateGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { - return o.Payload -} - -func (o *CreateGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewCreateGiteaCredentialsBadRequest creates a CreateGiteaCredentialsBadRequest with default headers values -func NewCreateGiteaCredentialsBadRequest() *CreateGiteaCredentialsBadRequest { - return &CreateGiteaCredentialsBadRequest{} -} - -/* -CreateGiteaCredentialsBadRequest describes a response with status code 400, with default header values. 
- -APIErrorResponse -*/ -type CreateGiteaCredentialsBadRequest struct { - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this create gitea credentials bad request response has a 2xx status code -func (o *CreateGiteaCredentialsBadRequest) IsSuccess() bool { - return false -} - -// IsRedirect returns true when this create gitea credentials bad request response has a 3xx status code -func (o *CreateGiteaCredentialsBadRequest) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create gitea credentials bad request response has a 4xx status code -func (o *CreateGiteaCredentialsBadRequest) IsClientError() bool { - return true -} - -// IsServerError returns true when this create gitea credentials bad request response has a 5xx status code -func (o *CreateGiteaCredentialsBadRequest) IsServerError() bool { - return false -} - -// IsCode returns true when this create gitea credentials bad request response a status code equal to that given -func (o *CreateGiteaCredentialsBadRequest) IsCode(code int) bool { - return code == 400 -} - -// Code gets the status code for the create gitea credentials bad request response -func (o *CreateGiteaCredentialsBadRequest) Code() int { - return 400 -} - -func (o *CreateGiteaCredentialsBadRequest) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *CreateGiteaCredentialsBadRequest) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *CreateGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *CreateGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/credentials/credentials_client.go b/client/credentials/credentials_client.go index 3dfe1abd..9d7b0563 100644 --- a/client/credentials/credentials_client.go +++ b/client/credentials/credentials_client.go @@ -58,24 +58,14 @@ type ClientOption func(*runtime.ClientOperation) type ClientService interface { CreateCredentials(params *CreateCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateCredentialsOK, error) - CreateGiteaCredentials(params *CreateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaCredentialsOK, error) - DeleteCredentials(params *DeleteCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error - DeleteGiteaCredentials(params *DeleteGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error - GetCredentials(params *GetCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetCredentialsOK, error) - GetGiteaCredentials(params *GetGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaCredentialsOK, error) - ListCredentials(params *ListCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListCredentialsOK, error) - ListGiteaCredentials(params *ListGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaCredentialsOK, error) - 
UpdateCredentials(params *UpdateCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateCredentialsOK, error) - UpdateGiteaCredentials(params *UpdateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaCredentialsOK, error) - SetTransport(transport runtime.ClientTransport) } @@ -118,45 +108,6 @@ func (a *Client) CreateCredentials(params *CreateCredentialsParams, authInfo run panic(msg) } -/* -CreateGiteaCredentials creates a gitea credential -*/ -func (a *Client) CreateGiteaCredentials(params *CreateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaCredentialsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewCreateGiteaCredentialsParams() - } - op := &runtime.ClientOperation{ - ID: "CreateGiteaCredentials", - Method: "POST", - PathPattern: "/gitea/credentials", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &CreateGiteaCredentialsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*CreateGiteaCredentialsOK) - if ok { - return success, nil - } - // unexpected success response - // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue - msg := fmt.Sprintf("unexpected success response for CreateGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) - panic(msg) -} - /* DeleteCredentials deletes a git hub credential */ @@ -189,38 +140,6 @@ func (a *Client) DeleteCredentials(params *DeleteCredentialsParams, authInfo run return nil } -/* -DeleteGiteaCredentials deletes a gitea credential -*/ -func (a *Client) DeleteGiteaCredentials(params *DeleteGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { - // TODO: Validate the params before sending - if params == nil { - params = NewDeleteGiteaCredentialsParams() - } - op := &runtime.ClientOperation{ - ID: "DeleteGiteaCredentials", - Method: "DELETE", - PathPattern: "/gitea/credentials/{id}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &DeleteGiteaCredentialsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - _, err := a.transport.Submit(op) - if err != nil { - return err - } - return nil -} - /* GetCredentials gets a git hub credential */ @@ -260,45 +179,6 @@ func (a *Client) GetCredentials(params *GetCredentialsParams, authInfo runtime.C panic(msg) } -/* -GetGiteaCredentials gets a gitea credential -*/ -func (a *Client) GetGiteaCredentials(params *GetGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaCredentialsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewGetGiteaCredentialsParams() - } - op := &runtime.ClientOperation{ - ID: "GetGiteaCredentials", - Method: "GET", - PathPattern: "/gitea/credentials/{id}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: 
[]string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &GetGiteaCredentialsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*GetGiteaCredentialsOK) - if ok { - return success, nil - } - // unexpected success response - // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue - msg := fmt.Sprintf("unexpected success response for GetGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) - panic(msg) -} - /* ListCredentials lists all credentials */ @@ -338,45 +218,6 @@ func (a *Client) ListCredentials(params *ListCredentialsParams, authInfo runtime panic(msg) } -/* -ListGiteaCredentials lists all credentials -*/ -func (a *Client) ListGiteaCredentials(params *ListGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaCredentialsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListGiteaCredentialsParams() - } - op := &runtime.ClientOperation{ - ID: "ListGiteaCredentials", - Method: "GET", - PathPattern: "/gitea/credentials", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListGiteaCredentialsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListGiteaCredentialsOK) - if ok { - return success, nil - } - // unexpected success response - // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue - msg := fmt.Sprintf("unexpected success response for ListGiteaCredentials: API contract not enforced by server. 
Client expected to get an error, but got: %T", result) - panic(msg) -} - /* UpdateCredentials updates a git hub credential */ @@ -416,45 +257,6 @@ func (a *Client) UpdateCredentials(params *UpdateCredentialsParams, authInfo run panic(msg) } -/* -UpdateGiteaCredentials updates a gitea credential -*/ -func (a *Client) UpdateGiteaCredentials(params *UpdateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaCredentialsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewUpdateGiteaCredentialsParams() - } - op := &runtime.ClientOperation{ - ID: "UpdateGiteaCredentials", - Method: "PUT", - PathPattern: "/gitea/credentials/{id}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &UpdateGiteaCredentialsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*UpdateGiteaCredentialsOK) - if ok { - return success, nil - } - // unexpected success response - // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue - msg := fmt.Sprintf("unexpected success response for UpdateGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) - panic(msg) -} - // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/client/credentials/delete_gitea_credentials_parameters.go b/client/credentials/delete_gitea_credentials_parameters.go deleted file mode 100644 index 598ac477..00000000 --- a/client/credentials/delete_gitea_credentials_parameters.go +++ /dev/null @@ -1,152 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - "github.com/go-openapi/swag" -) - -// NewDeleteGiteaCredentialsParams creates a new DeleteGiteaCredentialsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewDeleteGiteaCredentialsParams() *DeleteGiteaCredentialsParams { - return &DeleteGiteaCredentialsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewDeleteGiteaCredentialsParamsWithTimeout creates a new DeleteGiteaCredentialsParams object -// with the ability to set a timeout on a request. -func NewDeleteGiteaCredentialsParamsWithTimeout(timeout time.Duration) *DeleteGiteaCredentialsParams { - return &DeleteGiteaCredentialsParams{ - timeout: timeout, - } -} - -// NewDeleteGiteaCredentialsParamsWithContext creates a new DeleteGiteaCredentialsParams object -// with the ability to set a context for a request. 
-func NewDeleteGiteaCredentialsParamsWithContext(ctx context.Context) *DeleteGiteaCredentialsParams { - return &DeleteGiteaCredentialsParams{ - Context: ctx, - } -} - -// NewDeleteGiteaCredentialsParamsWithHTTPClient creates a new DeleteGiteaCredentialsParams object -// with the ability to set a custom HTTPClient for a request. -func NewDeleteGiteaCredentialsParamsWithHTTPClient(client *http.Client) *DeleteGiteaCredentialsParams { - return &DeleteGiteaCredentialsParams{ - HTTPClient: client, - } -} - -/* -DeleteGiteaCredentialsParams contains all the parameters to send to the API endpoint - - for the delete gitea credentials operation. - - Typically these are written to a http.Request. -*/ -type DeleteGiteaCredentialsParams struct { - - /* ID. - - ID of the Gitea credential. - */ - ID int64 - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the delete gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *DeleteGiteaCredentialsParams) WithDefaults() *DeleteGiteaCredentialsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the delete gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *DeleteGiteaCredentialsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) WithTimeout(timeout time.Duration) *DeleteGiteaCredentialsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) WithContext(ctx context.Context) *DeleteGiteaCredentialsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) WithHTTPClient(client *http.Client) *DeleteGiteaCredentialsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithID adds the id to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) WithID(id int64) *DeleteGiteaCredentialsParams { - o.SetID(id) - return o -} - -// SetID adds the id to the delete gitea credentials params -func (o *DeleteGiteaCredentialsParams) SetID(id int64) { - o.ID = id -} - -// WriteToRequest writes these params to a swagger request -func (o *DeleteGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param id - if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } - return nil -} diff --git a/client/credentials/delete_gitea_credentials_responses.go b/client/credentials/delete_gitea_credentials_responses.go deleted file mode 100644 index d1df7b0b..00000000 --- a/client/credentials/delete_gitea_credentials_responses.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" -) - -// DeleteGiteaCredentialsReader is a Reader for the DeleteGiteaCredentials structure. -type DeleteGiteaCredentialsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *DeleteGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - result := NewDeleteGiteaCredentialsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result -} - -// NewDeleteGiteaCredentialsDefault creates a DeleteGiteaCredentialsDefault with default headers values -func NewDeleteGiteaCredentialsDefault(code int) *DeleteGiteaCredentialsDefault { - return &DeleteGiteaCredentialsDefault{ - _statusCode: code, - } -} - -/* -DeleteGiteaCredentialsDefault describes a response with status code -1, with default header values. - -APIErrorResponse -*/ -type DeleteGiteaCredentialsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this delete gitea credentials default response has a 2xx status code -func (o *DeleteGiteaCredentialsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this delete gitea credentials default response has a 3xx status code -func (o *DeleteGiteaCredentialsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this delete gitea credentials default response has a 4xx status code -func (o *DeleteGiteaCredentialsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this delete gitea credentials default response has a 5xx status code -func (o *DeleteGiteaCredentialsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this delete gitea credentials default response a status code equal to that given -func (o *DeleteGiteaCredentialsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the delete gitea credentials default response -func (o *DeleteGiteaCredentialsDefault) Code() int { - return o._statusCode -} - -func (o *DeleteGiteaCredentialsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /gitea/credentials/{id}][%d] DeleteGiteaCredentials default %s", o._statusCode, payload) -} - -func (o *DeleteGiteaCredentialsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /gitea/credentials/{id}][%d] DeleteGiteaCredentials default %s", o._statusCode, payload) -} - -func (o *DeleteGiteaCredentialsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o 
*DeleteGiteaCredentialsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/credentials/get_credentials_responses.go b/client/credentials/get_credentials_responses.go index 4538c16e..1c2b800b 100644 --- a/client/credentials/get_credentials_responses.go +++ b/client/credentials/get_credentials_responses.go @@ -50,10 +50,10 @@ func NewGetCredentialsOK() *GetCredentialsOK { /* GetCredentialsOK describes a response with status code 200, with default header values. -ForgeCredentials +GithubCredentials */ type GetCredentialsOK struct { - Payload garm_params.ForgeCredentials + Payload garm_params.GithubCredentials } // IsSuccess returns true when this get credentials o k response has a 2xx status code @@ -96,7 +96,7 @@ func (o *GetCredentialsOK) String() string { return fmt.Sprintf("[GET /github/credentials/{id}][%d] getCredentialsOK %s", 200, payload) } -func (o *GetCredentialsOK) GetPayload() garm_params.ForgeCredentials { +func (o *GetCredentialsOK) GetPayload() garm_params.GithubCredentials { return o.Payload } diff --git a/client/credentials/get_gitea_credentials_parameters.go b/client/credentials/get_gitea_credentials_parameters.go deleted file mode 100644 index a844c326..00000000 --- a/client/credentials/get_gitea_credentials_parameters.go +++ /dev/null @@ -1,152 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - "github.com/go-openapi/swag" -) - -// NewGetGiteaCredentialsParams creates a new GetGiteaCredentialsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewGetGiteaCredentialsParams() *GetGiteaCredentialsParams { - return &GetGiteaCredentialsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewGetGiteaCredentialsParamsWithTimeout creates a new GetGiteaCredentialsParams object -// with the ability to set a timeout on a request. -func NewGetGiteaCredentialsParamsWithTimeout(timeout time.Duration) *GetGiteaCredentialsParams { - return &GetGiteaCredentialsParams{ - timeout: timeout, - } -} - -// NewGetGiteaCredentialsParamsWithContext creates a new GetGiteaCredentialsParams object -// with the ability to set a context for a request. -func NewGetGiteaCredentialsParamsWithContext(ctx context.Context) *GetGiteaCredentialsParams { - return &GetGiteaCredentialsParams{ - Context: ctx, - } -} - -// NewGetGiteaCredentialsParamsWithHTTPClient creates a new GetGiteaCredentialsParams object -// with the ability to set a custom HTTPClient for a request. -func NewGetGiteaCredentialsParamsWithHTTPClient(client *http.Client) *GetGiteaCredentialsParams { - return &GetGiteaCredentialsParams{ - HTTPClient: client, - } -} - -/* -GetGiteaCredentialsParams contains all the parameters to send to the API endpoint - - for the get gitea credentials operation. - - Typically these are written to a http.Request. 
-*/ -type GetGiteaCredentialsParams struct { - - /* ID. - - ID of the Gitea credential. - */ - ID int64 - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the get gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *GetGiteaCredentialsParams) WithDefaults() *GetGiteaCredentialsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the get gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *GetGiteaCredentialsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the get gitea credentials params -func (o *GetGiteaCredentialsParams) WithTimeout(timeout time.Duration) *GetGiteaCredentialsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the get gitea credentials params -func (o *GetGiteaCredentialsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the get gitea credentials params -func (o *GetGiteaCredentialsParams) WithContext(ctx context.Context) *GetGiteaCredentialsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the get gitea credentials params -func (o *GetGiteaCredentialsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the get gitea credentials params -func (o *GetGiteaCredentialsParams) WithHTTPClient(client *http.Client) *GetGiteaCredentialsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the get gitea credentials params -func (o *GetGiteaCredentialsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithID adds the id to the get gitea credentials params -func (o *GetGiteaCredentialsParams) WithID(id int64) *GetGiteaCredentialsParams { - o.SetID(id) - return o -} - -// SetID adds the id to the get gitea credentials params -func (o *GetGiteaCredentialsParams) SetID(id int64) { - o.ID = id -} - -// WriteToRequest writes these params to a swagger request -func (o *GetGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param id - if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/credentials/get_gitea_credentials_responses.go b/client/credentials/get_gitea_credentials_responses.go deleted file mode 100644 index ba116d63..00000000 --- a/client/credentials/get_gitea_credentials_responses.go +++ /dev/null @@ -1,179 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// GetGiteaCredentialsReader is a Reader for the GetGiteaCredentials structure. -type GetGiteaCredentialsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *GetGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewGetGiteaCredentialsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - case 400: - result := NewGetGiteaCredentialsBadRequest() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return nil, result - default: - return nil, runtime.NewAPIError("[GET /gitea/credentials/{id}] GetGiteaCredentials", response, response.Code()) - } -} - -// NewGetGiteaCredentialsOK creates a GetGiteaCredentialsOK with default headers values -func NewGetGiteaCredentialsOK() *GetGiteaCredentialsOK { - return &GetGiteaCredentialsOK{} -} - -/* -GetGiteaCredentialsOK describes a response with status code 200, with default header values. - -ForgeCredentials -*/ -type GetGiteaCredentialsOK struct { - Payload garm_params.ForgeCredentials -} - -// IsSuccess returns true when this get gitea credentials o k response has a 2xx status code -func (o *GetGiteaCredentialsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this get gitea credentials o k response has a 3xx status code -func (o *GetGiteaCredentialsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this get gitea credentials o k response has a 4xx status code -func (o *GetGiteaCredentialsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this get gitea credentials o k response has a 5xx status code -func (o *GetGiteaCredentialsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this get gitea credentials o k response a status code equal to that given -func (o *GetGiteaCredentialsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the get gitea credentials o k response -func (o *GetGiteaCredentialsOK) Code() int { - return 200 -} - -func (o *GetGiteaCredentialsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsOK %s", 200, payload) -} - -func (o *GetGiteaCredentialsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsOK %s", 200, payload) -} - -func (o *GetGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { - return o.Payload -} - -func (o *GetGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewGetGiteaCredentialsBadRequest creates a GetGiteaCredentialsBadRequest with default headers values -func NewGetGiteaCredentialsBadRequest() *GetGiteaCredentialsBadRequest { - return &GetGiteaCredentialsBadRequest{} -} - -/* -GetGiteaCredentialsBadRequest describes a response with status code 400, with default header values. 
- -APIErrorResponse -*/ -type GetGiteaCredentialsBadRequest struct { - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this get gitea credentials bad request response has a 2xx status code -func (o *GetGiteaCredentialsBadRequest) IsSuccess() bool { - return false -} - -// IsRedirect returns true when this get gitea credentials bad request response has a 3xx status code -func (o *GetGiteaCredentialsBadRequest) IsRedirect() bool { - return false -} - -// IsClientError returns true when this get gitea credentials bad request response has a 4xx status code -func (o *GetGiteaCredentialsBadRequest) IsClientError() bool { - return true -} - -// IsServerError returns true when this get gitea credentials bad request response has a 5xx status code -func (o *GetGiteaCredentialsBadRequest) IsServerError() bool { - return false -} - -// IsCode returns true when this get gitea credentials bad request response a status code equal to that given -func (o *GetGiteaCredentialsBadRequest) IsCode(code int) bool { - return code == 400 -} - -// Code gets the status code for the get gitea credentials bad request response -func (o *GetGiteaCredentialsBadRequest) Code() int { - return 400 -} - -func (o *GetGiteaCredentialsBadRequest) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *GetGiteaCredentialsBadRequest) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *GetGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *GetGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/credentials/list_gitea_credentials_parameters.go b/client/credentials/list_gitea_credentials_parameters.go deleted file mode 100644 index 5e321a88..00000000 --- a/client/credentials/list_gitea_credentials_parameters.go +++ /dev/null @@ -1,128 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListGiteaCredentialsParams creates a new ListGiteaCredentialsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListGiteaCredentialsParams() *ListGiteaCredentialsParams { - return &ListGiteaCredentialsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListGiteaCredentialsParamsWithTimeout creates a new ListGiteaCredentialsParams object -// with the ability to set a timeout on a request. 
-func NewListGiteaCredentialsParamsWithTimeout(timeout time.Duration) *ListGiteaCredentialsParams { - return &ListGiteaCredentialsParams{ - timeout: timeout, - } -} - -// NewListGiteaCredentialsParamsWithContext creates a new ListGiteaCredentialsParams object -// with the ability to set a context for a request. -func NewListGiteaCredentialsParamsWithContext(ctx context.Context) *ListGiteaCredentialsParams { - return &ListGiteaCredentialsParams{ - Context: ctx, - } -} - -// NewListGiteaCredentialsParamsWithHTTPClient creates a new ListGiteaCredentialsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListGiteaCredentialsParamsWithHTTPClient(client *http.Client) *ListGiteaCredentialsParams { - return &ListGiteaCredentialsParams{ - HTTPClient: client, - } -} - -/* -ListGiteaCredentialsParams contains all the parameters to send to the API endpoint - - for the list gitea credentials operation. - - Typically these are written to a http.Request. -*/ -type ListGiteaCredentialsParams struct { - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListGiteaCredentialsParams) WithDefaults() *ListGiteaCredentialsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListGiteaCredentialsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list gitea credentials params -func (o *ListGiteaCredentialsParams) WithTimeout(timeout time.Duration) *ListGiteaCredentialsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list gitea credentials params -func (o *ListGiteaCredentialsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list gitea credentials params -func (o *ListGiteaCredentialsParams) WithContext(ctx context.Context) *ListGiteaCredentialsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list gitea credentials params -func (o *ListGiteaCredentialsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list gitea credentials params -func (o *ListGiteaCredentialsParams) WithHTTPClient(client *http.Client) *ListGiteaCredentialsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list gitea credentials params -func (o *ListGiteaCredentialsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WriteToRequest writes these params to a swagger request -func (o *ListGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/credentials/list_gitea_credentials_responses.go b/client/credentials/list_gitea_credentials_responses.go deleted file mode 100644 index f27864be..00000000 --- a/client/credentials/list_gitea_credentials_responses.go +++ /dev/null @@ -1,179 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. 
- -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListGiteaCredentialsReader is a Reader for the ListGiteaCredentials structure. -type ListGiteaCredentialsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *ListGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListGiteaCredentialsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - case 400: - result := NewListGiteaCredentialsBadRequest() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return nil, result - default: - return nil, runtime.NewAPIError("[GET /gitea/credentials] ListGiteaCredentials", response, response.Code()) - } -} - -// NewListGiteaCredentialsOK creates a ListGiteaCredentialsOK with default headers values -func NewListGiteaCredentialsOK() *ListGiteaCredentialsOK { - return &ListGiteaCredentialsOK{} -} - -/* -ListGiteaCredentialsOK describes a response with status code 200, with default header values. - -Credentials -*/ -type ListGiteaCredentialsOK struct { - Payload garm_params.Credentials -} - -// IsSuccess returns true when this list gitea credentials o k response has a 2xx status code -func (o *ListGiteaCredentialsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list gitea credentials o k response has a 3xx status code -func (o *ListGiteaCredentialsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list gitea credentials o k response has a 4xx status code -func (o *ListGiteaCredentialsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list gitea credentials o k response has a 5xx status code -func (o *ListGiteaCredentialsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list gitea credentials o k response a status code equal to that given -func (o *ListGiteaCredentialsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list gitea credentials o k response -func (o *ListGiteaCredentialsOK) Code() int { - return 200 -} - -func (o *ListGiteaCredentialsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsOK %s", 200, payload) -} - -func (o *ListGiteaCredentialsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsOK %s", 200, payload) -} - -func (o *ListGiteaCredentialsOK) GetPayload() garm_params.Credentials { - return o.Payload -} - -func (o *ListGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListGiteaCredentialsBadRequest creates a ListGiteaCredentialsBadRequest with default headers values -func 
NewListGiteaCredentialsBadRequest() *ListGiteaCredentialsBadRequest { - return &ListGiteaCredentialsBadRequest{} -} - -/* -ListGiteaCredentialsBadRequest describes a response with status code 400, with default header values. - -APIErrorResponse -*/ -type ListGiteaCredentialsBadRequest struct { - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list gitea credentials bad request response has a 2xx status code -func (o *ListGiteaCredentialsBadRequest) IsSuccess() bool { - return false -} - -// IsRedirect returns true when this list gitea credentials bad request response has a 3xx status code -func (o *ListGiteaCredentialsBadRequest) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list gitea credentials bad request response has a 4xx status code -func (o *ListGiteaCredentialsBadRequest) IsClientError() bool { - return true -} - -// IsServerError returns true when this list gitea credentials bad request response has a 5xx status code -func (o *ListGiteaCredentialsBadRequest) IsServerError() bool { - return false -} - -// IsCode returns true when this list gitea credentials bad request response a status code equal to that given -func (o *ListGiteaCredentialsBadRequest) IsCode(code int) bool { - return code == 400 -} - -// Code gets the status code for the list gitea credentials bad request response -func (o *ListGiteaCredentialsBadRequest) Code() int { - return 400 -} - -func (o *ListGiteaCredentialsBadRequest) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *ListGiteaCredentialsBadRequest) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *ListGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/credentials/update_credentials_responses.go b/client/credentials/update_credentials_responses.go index 6a9f37f8..b0254604 100644 --- a/client/credentials/update_credentials_responses.go +++ b/client/credentials/update_credentials_responses.go @@ -50,10 +50,10 @@ func NewUpdateCredentialsOK() *UpdateCredentialsOK { /* UpdateCredentialsOK describes a response with status code 200, with default header values. 
-ForgeCredentials +GithubCredentials */ type UpdateCredentialsOK struct { - Payload garm_params.ForgeCredentials + Payload garm_params.GithubCredentials } // IsSuccess returns true when this update credentials o k response has a 2xx status code @@ -96,7 +96,7 @@ func (o *UpdateCredentialsOK) String() string { return fmt.Sprintf("[PUT /github/credentials/{id}][%d] updateCredentialsOK %s", 200, payload) } -func (o *UpdateCredentialsOK) GetPayload() garm_params.ForgeCredentials { +func (o *UpdateCredentialsOK) GetPayload() garm_params.GithubCredentials { return o.Payload } diff --git a/client/credentials/update_gitea_credentials_parameters.go b/client/credentials/update_gitea_credentials_parameters.go deleted file mode 100644 index 1907a0f2..00000000 --- a/client/credentials/update_gitea_credentials_parameters.go +++ /dev/null @@ -1,174 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - "github.com/go-openapi/swag" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewUpdateGiteaCredentialsParams creates a new UpdateGiteaCredentialsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewUpdateGiteaCredentialsParams() *UpdateGiteaCredentialsParams { - return &UpdateGiteaCredentialsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewUpdateGiteaCredentialsParamsWithTimeout creates a new UpdateGiteaCredentialsParams object -// with the ability to set a timeout on a request. -func NewUpdateGiteaCredentialsParamsWithTimeout(timeout time.Duration) *UpdateGiteaCredentialsParams { - return &UpdateGiteaCredentialsParams{ - timeout: timeout, - } -} - -// NewUpdateGiteaCredentialsParamsWithContext creates a new UpdateGiteaCredentialsParams object -// with the ability to set a context for a request. -func NewUpdateGiteaCredentialsParamsWithContext(ctx context.Context) *UpdateGiteaCredentialsParams { - return &UpdateGiteaCredentialsParams{ - Context: ctx, - } -} - -// NewUpdateGiteaCredentialsParamsWithHTTPClient creates a new UpdateGiteaCredentialsParams object -// with the ability to set a custom HTTPClient for a request. -func NewUpdateGiteaCredentialsParamsWithHTTPClient(client *http.Client) *UpdateGiteaCredentialsParams { - return &UpdateGiteaCredentialsParams{ - HTTPClient: client, - } -} - -/* -UpdateGiteaCredentialsParams contains all the parameters to send to the API endpoint - - for the update gitea credentials operation. - - Typically these are written to a http.Request. -*/ -type UpdateGiteaCredentialsParams struct { - - /* Body. - - Parameters used when updating a Gitea credential. - */ - Body garm_params.UpdateGiteaCredentialsParams - - /* ID. - - ID of the Gitea credential. - */ - ID int64 - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the update gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *UpdateGiteaCredentialsParams) WithDefaults() *UpdateGiteaCredentialsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the update gitea credentials params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *UpdateGiteaCredentialsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) WithTimeout(timeout time.Duration) *UpdateGiteaCredentialsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) WithContext(ctx context.Context) *UpdateGiteaCredentialsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) WithHTTPClient(client *http.Client) *UpdateGiteaCredentialsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) WithBody(body garm_params.UpdateGiteaCredentialsParams) *UpdateGiteaCredentialsParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) SetBody(body garm_params.UpdateGiteaCredentialsParams) { - o.Body = body -} - -// WithID adds the id to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) WithID(id int64) *UpdateGiteaCredentialsParams { - o.SetID(id) - return o -} - -// SetID adds the id to the update gitea credentials params -func (o *UpdateGiteaCredentialsParams) SetID(id int64) { - o.ID = id -} - -// WriteToRequest writes these params to a swagger request -func (o *UpdateGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param id - if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/credentials/update_gitea_credentials_responses.go b/client/credentials/update_gitea_credentials_responses.go deleted file mode 100644 index edbb54d8..00000000 --- a/client/credentials/update_gitea_credentials_responses.go +++ /dev/null @@ -1,179 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package credentials - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// UpdateGiteaCredentialsReader is a Reader for the UpdateGiteaCredentials structure. -type UpdateGiteaCredentialsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *UpdateGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewUpdateGiteaCredentialsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - case 400: - result := NewUpdateGiteaCredentialsBadRequest() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return nil, result - default: - return nil, runtime.NewAPIError("[PUT /gitea/credentials/{id}] UpdateGiteaCredentials", response, response.Code()) - } -} - -// NewUpdateGiteaCredentialsOK creates a UpdateGiteaCredentialsOK with default headers values -func NewUpdateGiteaCredentialsOK() *UpdateGiteaCredentialsOK { - return &UpdateGiteaCredentialsOK{} -} - -/* -UpdateGiteaCredentialsOK describes a response with status code 200, with default header values. - -ForgeCredentials -*/ -type UpdateGiteaCredentialsOK struct { - Payload garm_params.ForgeCredentials -} - -// IsSuccess returns true when this update gitea credentials o k response has a 2xx status code -func (o *UpdateGiteaCredentialsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this update gitea credentials o k response has a 3xx status code -func (o *UpdateGiteaCredentialsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this update gitea credentials o k response has a 4xx status code -func (o *UpdateGiteaCredentialsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this update gitea credentials o k response has a 5xx status code -func (o *UpdateGiteaCredentialsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this update gitea credentials o k response a status code equal to that given -func (o *UpdateGiteaCredentialsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the update gitea credentials o k response -func (o *UpdateGiteaCredentialsOK) Code() int { - return 200 -} - -func (o *UpdateGiteaCredentialsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsOK %s", 200, payload) -} - -func (o *UpdateGiteaCredentialsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsOK %s", 200, payload) -} - -func (o *UpdateGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { - return o.Payload -} - -func (o *UpdateGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewUpdateGiteaCredentialsBadRequest creates a UpdateGiteaCredentialsBadRequest with default headers values 
-func NewUpdateGiteaCredentialsBadRequest() *UpdateGiteaCredentialsBadRequest { - return &UpdateGiteaCredentialsBadRequest{} -} - -/* -UpdateGiteaCredentialsBadRequest describes a response with status code 400, with default header values. - -APIErrorResponse -*/ -type UpdateGiteaCredentialsBadRequest struct { - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this update gitea credentials bad request response has a 2xx status code -func (o *UpdateGiteaCredentialsBadRequest) IsSuccess() bool { - return false -} - -// IsRedirect returns true when this update gitea credentials bad request response has a 3xx status code -func (o *UpdateGiteaCredentialsBadRequest) IsRedirect() bool { - return false -} - -// IsClientError returns true when this update gitea credentials bad request response has a 4xx status code -func (o *UpdateGiteaCredentialsBadRequest) IsClientError() bool { - return true -} - -// IsServerError returns true when this update gitea credentials bad request response has a 5xx status code -func (o *UpdateGiteaCredentialsBadRequest) IsServerError() bool { - return false -} - -// IsCode returns true when this update gitea credentials bad request response a status code equal to that given -func (o *UpdateGiteaCredentialsBadRequest) IsCode(code int) bool { - return code == 400 -} - -// Code gets the status code for the update gitea credentials bad request response -func (o *UpdateGiteaCredentialsBadRequest) Code() int { - return 400 -} - -func (o *UpdateGiteaCredentialsBadRequest) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *UpdateGiteaCredentialsBadRequest) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsBadRequest %s", 400, payload) -} - -func (o *UpdateGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *UpdateGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/create_gitea_endpoint_parameters.go b/client/endpoints/create_gitea_endpoint_parameters.go deleted file mode 100644 index 11dfa73f..00000000 --- a/client/endpoints/create_gitea_endpoint_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewCreateGiteaEndpointParams creates a new CreateGiteaEndpointParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. 
-func NewCreateGiteaEndpointParams() *CreateGiteaEndpointParams { - return &CreateGiteaEndpointParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewCreateGiteaEndpointParamsWithTimeout creates a new CreateGiteaEndpointParams object -// with the ability to set a timeout on a request. -func NewCreateGiteaEndpointParamsWithTimeout(timeout time.Duration) *CreateGiteaEndpointParams { - return &CreateGiteaEndpointParams{ - timeout: timeout, - } -} - -// NewCreateGiteaEndpointParamsWithContext creates a new CreateGiteaEndpointParams object -// with the ability to set a context for a request. -func NewCreateGiteaEndpointParamsWithContext(ctx context.Context) *CreateGiteaEndpointParams { - return &CreateGiteaEndpointParams{ - Context: ctx, - } -} - -// NewCreateGiteaEndpointParamsWithHTTPClient creates a new CreateGiteaEndpointParams object -// with the ability to set a custom HTTPClient for a request. -func NewCreateGiteaEndpointParamsWithHTTPClient(client *http.Client) *CreateGiteaEndpointParams { - return &CreateGiteaEndpointParams{ - HTTPClient: client, - } -} - -/* -CreateGiteaEndpointParams contains all the parameters to send to the API endpoint - - for the create gitea endpoint operation. - - Typically these are written to a http.Request. -*/ -type CreateGiteaEndpointParams struct { - - /* Body. - - Parameters used when creating a Gitea endpoint. - */ - Body garm_params.CreateGiteaEndpointParams - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the create gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *CreateGiteaEndpointParams) WithDefaults() *CreateGiteaEndpointParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the create gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *CreateGiteaEndpointParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) WithTimeout(timeout time.Duration) *CreateGiteaEndpointParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) WithContext(ctx context.Context) *CreateGiteaEndpointParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) WithHTTPClient(client *http.Client) *CreateGiteaEndpointParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) WithBody(body garm_params.CreateGiteaEndpointParams) *CreateGiteaEndpointParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the create gitea endpoint params -func (o *CreateGiteaEndpointParams) SetBody(body garm_params.CreateGiteaEndpointParams) { - o.Body = body -} - -// WriteToRequest writes these params to a swagger request -func (o *CreateGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/endpoints/create_gitea_endpoint_responses.go b/client/endpoints/create_gitea_endpoint_responses.go deleted file mode 100644 index 6e99a973..00000000 --- a/client/endpoints/create_gitea_endpoint_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// CreateGiteaEndpointReader is a Reader for the CreateGiteaEndpoint structure. -type CreateGiteaEndpointReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *CreateGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewCreateGiteaEndpointOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewCreateGiteaEndpointDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewCreateGiteaEndpointOK creates a CreateGiteaEndpointOK with default headers values -func NewCreateGiteaEndpointOK() *CreateGiteaEndpointOK { - return &CreateGiteaEndpointOK{} -} - -/* -CreateGiteaEndpointOK describes a response with status code 200, with default header values. - -ForgeEndpoint -*/ -type CreateGiteaEndpointOK struct { - Payload garm_params.ForgeEndpoint -} - -// IsSuccess returns true when this create gitea endpoint o k response has a 2xx status code -func (o *CreateGiteaEndpointOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this create gitea endpoint o k response has a 3xx status code -func (o *CreateGiteaEndpointOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create gitea endpoint o k response has a 4xx status code -func (o *CreateGiteaEndpointOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this create gitea endpoint o k response has a 5xx status code -func (o *CreateGiteaEndpointOK) IsServerError() bool { - return false -} - -// IsCode returns true when this create gitea endpoint o k response a status code equal to that given -func (o *CreateGiteaEndpointOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the create gitea endpoint o k response -func (o *CreateGiteaEndpointOK) Code() int { - return 200 -} - -func (o *CreateGiteaEndpointOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/endpoints][%d] createGiteaEndpointOK %s", 200, payload) -} - -func (o *CreateGiteaEndpointOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/endpoints][%d] createGiteaEndpointOK %s", 200, payload) -} - -func (o *CreateGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { - return o.Payload -} - -func (o *CreateGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewCreateGiteaEndpointDefault creates a CreateGiteaEndpointDefault with default headers values -func NewCreateGiteaEndpointDefault(code int) *CreateGiteaEndpointDefault { - return &CreateGiteaEndpointDefault{ - _statusCode: code, - } -} - -/* -CreateGiteaEndpointDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type CreateGiteaEndpointDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this create gitea endpoint default response has a 2xx status code -func (o *CreateGiteaEndpointDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this create gitea endpoint default response has a 3xx status code -func (o *CreateGiteaEndpointDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this create gitea endpoint default response has a 4xx status code -func (o *CreateGiteaEndpointDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this create gitea endpoint default response has a 5xx status code -func (o *CreateGiteaEndpointDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this create gitea endpoint default response a status code equal to that given -func (o *CreateGiteaEndpointDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the create gitea endpoint default response -func (o *CreateGiteaEndpointDefault) Code() int { - return o._statusCode -} - -func (o *CreateGiteaEndpointDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/endpoints][%d] CreateGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *CreateGiteaEndpointDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /gitea/endpoints][%d] CreateGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *CreateGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *CreateGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/create_github_endpoint_responses.go b/client/endpoints/create_github_endpoint_responses.go index 60961f3a..acd95088 100644 --- a/client/endpoints/create_github_endpoint_responses.go +++ b/client/endpoints/create_github_endpoint_responses.go @@ -51,10 +51,10 @@ func NewCreateGithubEndpointOK() *CreateGithubEndpointOK { /* CreateGithubEndpointOK describes a response with status code 200, with default header values. -ForgeEndpoint +GithubEndpoint */ type CreateGithubEndpointOK struct { - Payload garm_params.ForgeEndpoint + Payload garm_params.GithubEndpoint } // IsSuccess returns true when this create github endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *CreateGithubEndpointOK) String() string { return fmt.Sprintf("[POST /github/endpoints][%d] createGithubEndpointOK %s", 200, payload) } -func (o *CreateGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { +func (o *CreateGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { return o.Payload } diff --git a/client/endpoints/delete_gitea_endpoint_parameters.go b/client/endpoints/delete_gitea_endpoint_parameters.go deleted file mode 100644 index f7ea5a5d..00000000 --- a/client/endpoints/delete_gitea_endpoint_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewDeleteGiteaEndpointParams creates a new DeleteGiteaEndpointParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewDeleteGiteaEndpointParams() *DeleteGiteaEndpointParams { - return &DeleteGiteaEndpointParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewDeleteGiteaEndpointParamsWithTimeout creates a new DeleteGiteaEndpointParams object -// with the ability to set a timeout on a request. -func NewDeleteGiteaEndpointParamsWithTimeout(timeout time.Duration) *DeleteGiteaEndpointParams { - return &DeleteGiteaEndpointParams{ - timeout: timeout, - } -} - -// NewDeleteGiteaEndpointParamsWithContext creates a new DeleteGiteaEndpointParams object -// with the ability to set a context for a request. -func NewDeleteGiteaEndpointParamsWithContext(ctx context.Context) *DeleteGiteaEndpointParams { - return &DeleteGiteaEndpointParams{ - Context: ctx, - } -} - -// NewDeleteGiteaEndpointParamsWithHTTPClient creates a new DeleteGiteaEndpointParams object -// with the ability to set a custom HTTPClient for a request. -func NewDeleteGiteaEndpointParamsWithHTTPClient(client *http.Client) *DeleteGiteaEndpointParams { - return &DeleteGiteaEndpointParams{ - HTTPClient: client, - } -} - -/* -DeleteGiteaEndpointParams contains all the parameters to send to the API endpoint - - for the delete gitea endpoint operation. - - Typically these are written to a http.Request. -*/ -type DeleteGiteaEndpointParams struct { - - /* Name. - - The name of the Gitea endpoint. - */ - Name string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the delete gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *DeleteGiteaEndpointParams) WithDefaults() *DeleteGiteaEndpointParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the delete gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *DeleteGiteaEndpointParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) WithTimeout(timeout time.Duration) *DeleteGiteaEndpointParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) WithContext(ctx context.Context) *DeleteGiteaEndpointParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) WithHTTPClient(client *http.Client) *DeleteGiteaEndpointParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithName adds the name to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) WithName(name string) *DeleteGiteaEndpointParams { - o.SetName(name) - return o -} - -// SetName adds the name to the delete gitea endpoint params -func (o *DeleteGiteaEndpointParams) SetName(name string) { - o.Name = name -} - -// WriteToRequest writes these params to a swagger request -func (o *DeleteGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param name - if err := r.SetPathParam("name", o.Name); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/endpoints/delete_gitea_endpoint_responses.go b/client/endpoints/delete_gitea_endpoint_responses.go deleted file mode 100644 index 787d6585..00000000 --- a/client/endpoints/delete_gitea_endpoint_responses.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" -) - -// DeleteGiteaEndpointReader is a Reader for the DeleteGiteaEndpoint structure. -type DeleteGiteaEndpointReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *DeleteGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - result := NewDeleteGiteaEndpointDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result -} - -// NewDeleteGiteaEndpointDefault creates a DeleteGiteaEndpointDefault with default headers values -func NewDeleteGiteaEndpointDefault(code int) *DeleteGiteaEndpointDefault { - return &DeleteGiteaEndpointDefault{ - _statusCode: code, - } -} - -/* -DeleteGiteaEndpointDefault describes a response with status code -1, with default header values. - -APIErrorResponse -*/ -type DeleteGiteaEndpointDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this delete gitea endpoint default response has a 2xx status code -func (o *DeleteGiteaEndpointDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this delete gitea endpoint default response has a 3xx status code -func (o *DeleteGiteaEndpointDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this delete gitea endpoint default response has a 4xx status code -func (o *DeleteGiteaEndpointDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this delete gitea endpoint default response has a 5xx status code -func (o *DeleteGiteaEndpointDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this delete gitea endpoint default response a status code equal to that given -func (o *DeleteGiteaEndpointDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the delete gitea endpoint default response -func (o *DeleteGiteaEndpointDefault) Code() int { - return o._statusCode -} - -func (o *DeleteGiteaEndpointDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /gitea/endpoints/{name}][%d] DeleteGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *DeleteGiteaEndpointDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /gitea/endpoints/{name}][%d] DeleteGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *DeleteGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *DeleteGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/endpoints_client.go b/client/endpoints/endpoints_client.go index 74019577..9b951b2c 100644 --- a/client/endpoints/endpoints_client.go +++ b/client/endpoints/endpoints_client.go @@ -54,67 +54,19 @@ type ClientOption func(*runtime.ClientOperation) // ClientService is the interface for Client methods type ClientService interface { - CreateGiteaEndpoint(params *CreateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaEndpointOK, error) - CreateGithubEndpoint(params *CreateGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGithubEndpointOK, error) - DeleteGiteaEndpoint(params 
*DeleteGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error - DeleteGithubEndpoint(params *DeleteGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error - GetGiteaEndpoint(params *GetGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaEndpointOK, error) - GetGithubEndpoint(params *GetGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGithubEndpointOK, error) - ListGiteaEndpoints(params *ListGiteaEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaEndpointsOK, error) - ListGithubEndpoints(params *ListGithubEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGithubEndpointsOK, error) - UpdateGiteaEndpoint(params *UpdateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaEndpointOK, error) - UpdateGithubEndpoint(params *UpdateGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGithubEndpointOK, error) SetTransport(transport runtime.ClientTransport) } -/* -CreateGiteaEndpoint creates a gitea endpoint -*/ -func (a *Client) CreateGiteaEndpoint(params *CreateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaEndpointOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewCreateGiteaEndpointParams() - } - op := &runtime.ClientOperation{ - ID: "CreateGiteaEndpoint", - Method: "POST", - PathPattern: "/gitea/endpoints", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &CreateGiteaEndpointReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*CreateGiteaEndpointOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*CreateGiteaEndpointDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* CreateGithubEndpoint creates a git hub endpoint */ @@ -153,38 +105,6 @@ func (a *Client) CreateGithubEndpoint(params *CreateGithubEndpointParams, authIn return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -DeleteGiteaEndpoint deletes a gitea endpoint -*/ -func (a *Client) DeleteGiteaEndpoint(params *DeleteGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { - // TODO: Validate the params before sending - if params == nil { - params = NewDeleteGiteaEndpointParams() - } - op := &runtime.ClientOperation{ - ID: "DeleteGiteaEndpoint", - Method: "DELETE", - PathPattern: "/gitea/endpoints/{name}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &DeleteGiteaEndpointReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - _, err := a.transport.Submit(op) - if err != nil { - return err - } - return nil -} - /* 
DeleteGithubEndpoint deletes a git hub endpoint */ @@ -217,44 +137,6 @@ func (a *Client) DeleteGithubEndpoint(params *DeleteGithubEndpointParams, authIn return nil } -/* -GetGiteaEndpoint gets a gitea endpoint -*/ -func (a *Client) GetGiteaEndpoint(params *GetGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaEndpointOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewGetGiteaEndpointParams() - } - op := &runtime.ClientOperation{ - ID: "GetGiteaEndpoint", - Method: "GET", - PathPattern: "/gitea/endpoints/{name}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &GetGiteaEndpointReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*GetGiteaEndpointOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*GetGiteaEndpointDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* GetGithubEndpoint gets a git hub endpoint */ @@ -293,44 +175,6 @@ func (a *Client) GetGithubEndpoint(params *GetGithubEndpointParams, authInfo run return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -ListGiteaEndpoints lists all gitea endpoints -*/ -func (a *Client) ListGiteaEndpoints(params *ListGiteaEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaEndpointsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListGiteaEndpointsParams() - } - op := &runtime.ClientOperation{ - ID: "ListGiteaEndpoints", - Method: "GET", - PathPattern: "/gitea/endpoints", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListGiteaEndpointsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListGiteaEndpointsOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListGiteaEndpointsDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* ListGithubEndpoints lists all git hub endpoints */ @@ -369,44 +213,6 @@ func (a *Client) ListGithubEndpoints(params *ListGithubEndpointsParams, authInfo return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -UpdateGiteaEndpoint updates a gitea endpoint -*/ -func (a *Client) UpdateGiteaEndpoint(params *UpdateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaEndpointOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewUpdateGiteaEndpointParams() - } - op := &runtime.ClientOperation{ - ID: 
"UpdateGiteaEndpoint", - Method: "PUT", - PathPattern: "/gitea/endpoints/{name}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &UpdateGiteaEndpointReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*UpdateGiteaEndpointOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*UpdateGiteaEndpointDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* UpdateGithubEndpoint updates a git hub endpoint */ diff --git a/client/endpoints/get_gitea_endpoint_parameters.go b/client/endpoints/get_gitea_endpoint_parameters.go deleted file mode 100644 index 0d7f883b..00000000 --- a/client/endpoints/get_gitea_endpoint_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewGetGiteaEndpointParams creates a new GetGiteaEndpointParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewGetGiteaEndpointParams() *GetGiteaEndpointParams { - return &GetGiteaEndpointParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewGetGiteaEndpointParamsWithTimeout creates a new GetGiteaEndpointParams object -// with the ability to set a timeout on a request. -func NewGetGiteaEndpointParamsWithTimeout(timeout time.Duration) *GetGiteaEndpointParams { - return &GetGiteaEndpointParams{ - timeout: timeout, - } -} - -// NewGetGiteaEndpointParamsWithContext creates a new GetGiteaEndpointParams object -// with the ability to set a context for a request. -func NewGetGiteaEndpointParamsWithContext(ctx context.Context) *GetGiteaEndpointParams { - return &GetGiteaEndpointParams{ - Context: ctx, - } -} - -// NewGetGiteaEndpointParamsWithHTTPClient creates a new GetGiteaEndpointParams object -// with the ability to set a custom HTTPClient for a request. -func NewGetGiteaEndpointParamsWithHTTPClient(client *http.Client) *GetGiteaEndpointParams { - return &GetGiteaEndpointParams{ - HTTPClient: client, - } -} - -/* -GetGiteaEndpointParams contains all the parameters to send to the API endpoint - - for the get gitea endpoint operation. - - Typically these are written to a http.Request. -*/ -type GetGiteaEndpointParams struct { - - /* Name. - - The name of the Gitea endpoint. - */ - Name string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the get gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *GetGiteaEndpointParams) WithDefaults() *GetGiteaEndpointParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the get gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *GetGiteaEndpointParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the get gitea endpoint params -func (o *GetGiteaEndpointParams) WithTimeout(timeout time.Duration) *GetGiteaEndpointParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the get gitea endpoint params -func (o *GetGiteaEndpointParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the get gitea endpoint params -func (o *GetGiteaEndpointParams) WithContext(ctx context.Context) *GetGiteaEndpointParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the get gitea endpoint params -func (o *GetGiteaEndpointParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the get gitea endpoint params -func (o *GetGiteaEndpointParams) WithHTTPClient(client *http.Client) *GetGiteaEndpointParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the get gitea endpoint params -func (o *GetGiteaEndpointParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithName adds the name to the get gitea endpoint params -func (o *GetGiteaEndpointParams) WithName(name string) *GetGiteaEndpointParams { - o.SetName(name) - return o -} - -// SetName adds the name to the get gitea endpoint params -func (o *GetGiteaEndpointParams) SetName(name string) { - o.Name = name -} - -// WriteToRequest writes these params to a swagger request -func (o *GetGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param name - if err := r.SetPathParam("name", o.Name); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/endpoints/get_gitea_endpoint_responses.go b/client/endpoints/get_gitea_endpoint_responses.go deleted file mode 100644 index e4bacd03..00000000 --- a/client/endpoints/get_gitea_endpoint_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// GetGiteaEndpointReader is a Reader for the GetGiteaEndpoint structure. -type GetGiteaEndpointReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *GetGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewGetGiteaEndpointOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewGetGiteaEndpointDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewGetGiteaEndpointOK creates a GetGiteaEndpointOK with default headers values -func NewGetGiteaEndpointOK() *GetGiteaEndpointOK { - return &GetGiteaEndpointOK{} -} - -/* -GetGiteaEndpointOK describes a response with status code 200, with default header values. - -ForgeEndpoint -*/ -type GetGiteaEndpointOK struct { - Payload garm_params.ForgeEndpoint -} - -// IsSuccess returns true when this get gitea endpoint o k response has a 2xx status code -func (o *GetGiteaEndpointOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this get gitea endpoint o k response has a 3xx status code -func (o *GetGiteaEndpointOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this get gitea endpoint o k response has a 4xx status code -func (o *GetGiteaEndpointOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this get gitea endpoint o k response has a 5xx status code -func (o *GetGiteaEndpointOK) IsServerError() bool { - return false -} - -// IsCode returns true when this get gitea endpoint o k response a status code equal to that given -func (o *GetGiteaEndpointOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the get gitea endpoint o k response -func (o *GetGiteaEndpointOK) Code() int { - return 200 -} - -func (o *GetGiteaEndpointOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] getGiteaEndpointOK %s", 200, payload) -} - -func (o *GetGiteaEndpointOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] getGiteaEndpointOK %s", 200, payload) -} - -func (o *GetGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { - return o.Payload -} - -func (o *GetGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewGetGiteaEndpointDefault creates a GetGiteaEndpointDefault with default headers values -func NewGetGiteaEndpointDefault(code int) *GetGiteaEndpointDefault { - return &GetGiteaEndpointDefault{ - _statusCode: code, - } -} - -/* -GetGiteaEndpointDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type GetGiteaEndpointDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this get gitea endpoint default response has a 2xx status code -func (o *GetGiteaEndpointDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this get gitea endpoint default response has a 3xx status code -func (o *GetGiteaEndpointDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this get gitea endpoint default response has a 4xx status code -func (o *GetGiteaEndpointDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this get gitea endpoint default response has a 5xx status code -func (o *GetGiteaEndpointDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this get gitea endpoint default response a status code equal to that given -func (o *GetGiteaEndpointDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the get gitea endpoint default response -func (o *GetGiteaEndpointDefault) Code() int { - return o._statusCode -} - -func (o *GetGiteaEndpointDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] GetGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *GetGiteaEndpointDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] GetGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *GetGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *GetGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/get_github_endpoint_responses.go b/client/endpoints/get_github_endpoint_responses.go index e2b97a60..d84f9280 100644 --- a/client/endpoints/get_github_endpoint_responses.go +++ b/client/endpoints/get_github_endpoint_responses.go @@ -51,10 +51,10 @@ func NewGetGithubEndpointOK() *GetGithubEndpointOK { /* GetGithubEndpointOK describes a response with status code 200, with default header values. -ForgeEndpoint +GithubEndpoint */ type GetGithubEndpointOK struct { - Payload garm_params.ForgeEndpoint + Payload garm_params.GithubEndpoint } // IsSuccess returns true when this get github endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *GetGithubEndpointOK) String() string { return fmt.Sprintf("[GET /github/endpoints/{name}][%d] getGithubEndpointOK %s", 200, payload) } -func (o *GetGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { +func (o *GetGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { return o.Payload } diff --git a/client/endpoints/list_gitea_endpoints_parameters.go b/client/endpoints/list_gitea_endpoints_parameters.go deleted file mode 100644 index 93ec6ae6..00000000 --- a/client/endpoints/list_gitea_endpoints_parameters.go +++ /dev/null @@ -1,128 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListGiteaEndpointsParams creates a new ListGiteaEndpointsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListGiteaEndpointsParams() *ListGiteaEndpointsParams { - return &ListGiteaEndpointsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListGiteaEndpointsParamsWithTimeout creates a new ListGiteaEndpointsParams object -// with the ability to set a timeout on a request. -func NewListGiteaEndpointsParamsWithTimeout(timeout time.Duration) *ListGiteaEndpointsParams { - return &ListGiteaEndpointsParams{ - timeout: timeout, - } -} - -// NewListGiteaEndpointsParamsWithContext creates a new ListGiteaEndpointsParams object -// with the ability to set a context for a request. -func NewListGiteaEndpointsParamsWithContext(ctx context.Context) *ListGiteaEndpointsParams { - return &ListGiteaEndpointsParams{ - Context: ctx, - } -} - -// NewListGiteaEndpointsParamsWithHTTPClient creates a new ListGiteaEndpointsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListGiteaEndpointsParamsWithHTTPClient(client *http.Client) *ListGiteaEndpointsParams { - return &ListGiteaEndpointsParams{ - HTTPClient: client, - } -} - -/* -ListGiteaEndpointsParams contains all the parameters to send to the API endpoint - - for the list gitea endpoints operation. - - Typically these are written to a http.Request. -*/ -type ListGiteaEndpointsParams struct { - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list gitea endpoints params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListGiteaEndpointsParams) WithDefaults() *ListGiteaEndpointsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list gitea endpoints params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *ListGiteaEndpointsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) WithTimeout(timeout time.Duration) *ListGiteaEndpointsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) WithContext(ctx context.Context) *ListGiteaEndpointsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) WithHTTPClient(client *http.Client) *ListGiteaEndpointsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list gitea endpoints params -func (o *ListGiteaEndpointsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WriteToRequest writes these params to a swagger request -func (o *ListGiteaEndpointsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/endpoints/list_gitea_endpoints_responses.go b/client/endpoints/list_gitea_endpoints_responses.go deleted file mode 100644 index 0fdd90ec..00000000 --- a/client/endpoints/list_gitea_endpoints_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListGiteaEndpointsReader is a Reader for the ListGiteaEndpoints structure. -type ListGiteaEndpointsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *ListGiteaEndpointsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListGiteaEndpointsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListGiteaEndpointsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListGiteaEndpointsOK creates a ListGiteaEndpointsOK with default headers values -func NewListGiteaEndpointsOK() *ListGiteaEndpointsOK { - return &ListGiteaEndpointsOK{} -} - -/* -ListGiteaEndpointsOK describes a response with status code 200, with default header values. 
- -ForgeEndpoints -*/ -type ListGiteaEndpointsOK struct { - Payload garm_params.ForgeEndpoints -} - -// IsSuccess returns true when this list gitea endpoints o k response has a 2xx status code -func (o *ListGiteaEndpointsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list gitea endpoints o k response has a 3xx status code -func (o *ListGiteaEndpointsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list gitea endpoints o k response has a 4xx status code -func (o *ListGiteaEndpointsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list gitea endpoints o k response has a 5xx status code -func (o *ListGiteaEndpointsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list gitea endpoints o k response a status code equal to that given -func (o *ListGiteaEndpointsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list gitea endpoints o k response -func (o *ListGiteaEndpointsOK) Code() int { - return 200 -} - -func (o *ListGiteaEndpointsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints][%d] listGiteaEndpointsOK %s", 200, payload) -} - -func (o *ListGiteaEndpointsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints][%d] listGiteaEndpointsOK %s", 200, payload) -} - -func (o *ListGiteaEndpointsOK) GetPayload() garm_params.ForgeEndpoints { - return o.Payload -} - -func (o *ListGiteaEndpointsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListGiteaEndpointsDefault creates a ListGiteaEndpointsDefault with default headers values -func NewListGiteaEndpointsDefault(code int) *ListGiteaEndpointsDefault { - return &ListGiteaEndpointsDefault{ - _statusCode: code, - } -} - -/* -ListGiteaEndpointsDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type ListGiteaEndpointsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list gitea endpoints default response has a 2xx status code -func (o *ListGiteaEndpointsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list gitea endpoints default response has a 3xx status code -func (o *ListGiteaEndpointsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list gitea endpoints default response has a 4xx status code -func (o *ListGiteaEndpointsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list gitea endpoints default response has a 5xx status code -func (o *ListGiteaEndpointsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list gitea endpoints default response a status code equal to that given -func (o *ListGiteaEndpointsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list gitea endpoints default response -func (o *ListGiteaEndpointsDefault) Code() int { - return o._statusCode -} - -func (o *ListGiteaEndpointsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints][%d] ListGiteaEndpoints default %s", o._statusCode, payload) -} - -func (o *ListGiteaEndpointsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /gitea/endpoints][%d] ListGiteaEndpoints default %s", o._statusCode, payload) -} - -func (o *ListGiteaEndpointsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListGiteaEndpointsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/list_github_endpoints_responses.go b/client/endpoints/list_github_endpoints_responses.go index 33485f9b..6c2dde6c 100644 --- a/client/endpoints/list_github_endpoints_responses.go +++ b/client/endpoints/list_github_endpoints_responses.go @@ -51,10 +51,10 @@ func NewListGithubEndpointsOK() *ListGithubEndpointsOK { /* ListGithubEndpointsOK describes a response with status code 200, with default header values. -ForgeEndpoints +GithubEndpoints */ type ListGithubEndpointsOK struct { - Payload garm_params.ForgeEndpoints + Payload garm_params.GithubEndpoints } // IsSuccess returns true when this list github endpoints o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *ListGithubEndpointsOK) String() string { return fmt.Sprintf("[GET /github/endpoints][%d] listGithubEndpointsOK %s", 200, payload) } -func (o *ListGithubEndpointsOK) GetPayload() garm_params.ForgeEndpoints { +func (o *ListGithubEndpointsOK) GetPayload() garm_params.GithubEndpoints { return o.Payload } diff --git a/client/endpoints/update_gitea_endpoint_parameters.go b/client/endpoints/update_gitea_endpoint_parameters.go deleted file mode 100644 index bfd18e2e..00000000 --- a/client/endpoints/update_gitea_endpoint_parameters.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewUpdateGiteaEndpointParams creates a new UpdateGiteaEndpointParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewUpdateGiteaEndpointParams() *UpdateGiteaEndpointParams { - return &UpdateGiteaEndpointParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewUpdateGiteaEndpointParamsWithTimeout creates a new UpdateGiteaEndpointParams object -// with the ability to set a timeout on a request. -func NewUpdateGiteaEndpointParamsWithTimeout(timeout time.Duration) *UpdateGiteaEndpointParams { - return &UpdateGiteaEndpointParams{ - timeout: timeout, - } -} - -// NewUpdateGiteaEndpointParamsWithContext creates a new UpdateGiteaEndpointParams object -// with the ability to set a context for a request. -func NewUpdateGiteaEndpointParamsWithContext(ctx context.Context) *UpdateGiteaEndpointParams { - return &UpdateGiteaEndpointParams{ - Context: ctx, - } -} - -// NewUpdateGiteaEndpointParamsWithHTTPClient creates a new UpdateGiteaEndpointParams object -// with the ability to set a custom HTTPClient for a request. -func NewUpdateGiteaEndpointParamsWithHTTPClient(client *http.Client) *UpdateGiteaEndpointParams { - return &UpdateGiteaEndpointParams{ - HTTPClient: client, - } -} - -/* -UpdateGiteaEndpointParams contains all the parameters to send to the API endpoint - - for the update gitea endpoint operation. - - Typically these are written to a http.Request. -*/ -type UpdateGiteaEndpointParams struct { - - /* Body. - - Parameters used when updating a Gitea endpoint. - */ - Body garm_params.UpdateGiteaEndpointParams - - /* Name. - - The name of the Gitea endpoint. - */ - Name string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the update gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *UpdateGiteaEndpointParams) WithDefaults() *UpdateGiteaEndpointParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the update gitea endpoint params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *UpdateGiteaEndpointParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) WithTimeout(timeout time.Duration) *UpdateGiteaEndpointParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) WithContext(ctx context.Context) *UpdateGiteaEndpointParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) WithHTTPClient(client *http.Client) *UpdateGiteaEndpointParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) WithBody(body garm_params.UpdateGiteaEndpointParams) *UpdateGiteaEndpointParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) SetBody(body garm_params.UpdateGiteaEndpointParams) { - o.Body = body -} - -// WithName adds the name to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) WithName(name string) *UpdateGiteaEndpointParams { - o.SetName(name) - return o -} - -// SetName adds the name to the update gitea endpoint params -func (o *UpdateGiteaEndpointParams) SetName(name string) { - o.Name = name -} - -// WriteToRequest writes these params to a swagger request -func (o *UpdateGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param name - if err := r.SetPathParam("name", o.Name); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/endpoints/update_gitea_endpoint_responses.go b/client/endpoints/update_gitea_endpoint_responses.go deleted file mode 100644 index 052f45fa..00000000 --- a/client/endpoints/update_gitea_endpoint_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package endpoints - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// UpdateGiteaEndpointReader is a Reader for the UpdateGiteaEndpoint structure. -type UpdateGiteaEndpointReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *UpdateGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewUpdateGiteaEndpointOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewUpdateGiteaEndpointDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewUpdateGiteaEndpointOK creates a UpdateGiteaEndpointOK with default headers values -func NewUpdateGiteaEndpointOK() *UpdateGiteaEndpointOK { - return &UpdateGiteaEndpointOK{} -} - -/* -UpdateGiteaEndpointOK describes a response with status code 200, with default header values. - -ForgeEndpoint -*/ -type UpdateGiteaEndpointOK struct { - Payload garm_params.ForgeEndpoint -} - -// IsSuccess returns true when this update gitea endpoint o k response has a 2xx status code -func (o *UpdateGiteaEndpointOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this update gitea endpoint o k response has a 3xx status code -func (o *UpdateGiteaEndpointOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this update gitea endpoint o k response has a 4xx status code -func (o *UpdateGiteaEndpointOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this update gitea endpoint o k response has a 5xx status code -func (o *UpdateGiteaEndpointOK) IsServerError() bool { - return false -} - -// IsCode returns true when this update gitea endpoint o k response a status code equal to that given -func (o *UpdateGiteaEndpointOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the update gitea endpoint o k response -func (o *UpdateGiteaEndpointOK) Code() int { - return 200 -} - -func (o *UpdateGiteaEndpointOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] updateGiteaEndpointOK %s", 200, payload) -} - -func (o *UpdateGiteaEndpointOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] updateGiteaEndpointOK %s", 200, payload) -} - -func (o *UpdateGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { - return o.Payload -} - -func (o *UpdateGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewUpdateGiteaEndpointDefault creates a UpdateGiteaEndpointDefault with default headers values -func NewUpdateGiteaEndpointDefault(code int) *UpdateGiteaEndpointDefault { - return &UpdateGiteaEndpointDefault{ - _statusCode: code, - } -} - -/* -UpdateGiteaEndpointDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type UpdateGiteaEndpointDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this update gitea endpoint default response has a 2xx status code -func (o *UpdateGiteaEndpointDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this update gitea endpoint default response has a 3xx status code -func (o *UpdateGiteaEndpointDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this update gitea endpoint default response has a 4xx status code -func (o *UpdateGiteaEndpointDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this update gitea endpoint default response has a 5xx status code -func (o *UpdateGiteaEndpointDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this update gitea endpoint default response a status code equal to that given -func (o *UpdateGiteaEndpointDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the update gitea endpoint default response -func (o *UpdateGiteaEndpointDefault) Code() int { - return o._statusCode -} - -func (o *UpdateGiteaEndpointDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] UpdateGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *UpdateGiteaEndpointDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] UpdateGiteaEndpoint default %s", o._statusCode, payload) -} - -func (o *UpdateGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *UpdateGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/endpoints/update_github_endpoint_responses.go b/client/endpoints/update_github_endpoint_responses.go index 27cd4a71..234ed711 100644 --- a/client/endpoints/update_github_endpoint_responses.go +++ b/client/endpoints/update_github_endpoint_responses.go @@ -51,10 +51,10 @@ func NewUpdateGithubEndpointOK() *UpdateGithubEndpointOK { /* UpdateGithubEndpointOK describes a response with status code 200, with default header values. -ForgeEndpoint +GithubEndpoint */ type UpdateGithubEndpointOK struct { - Payload garm_params.ForgeEndpoint + Payload garm_params.GithubEndpoint } // IsSuccess returns true when this update github endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *UpdateGithubEndpointOK) String() string { return fmt.Sprintf("[PUT /github/endpoints/{name}][%d] updateGithubEndpointOK %s", 200, payload) } -func (o *UpdateGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { +func (o *UpdateGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { return o.Payload } diff --git a/client/enterprises/create_enterprise_scale_set_parameters.go b/client/enterprises/create_enterprise_scale_set_parameters.go deleted file mode 100644 index 76fe13ec..00000000 --- a/client/enterprises/create_enterprise_scale_set_parameters.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package enterprises - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewCreateEnterpriseScaleSetParams creates a new CreateEnterpriseScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewCreateEnterpriseScaleSetParams() *CreateEnterpriseScaleSetParams { - return &CreateEnterpriseScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewCreateEnterpriseScaleSetParamsWithTimeout creates a new CreateEnterpriseScaleSetParams object -// with the ability to set a timeout on a request. -func NewCreateEnterpriseScaleSetParamsWithTimeout(timeout time.Duration) *CreateEnterpriseScaleSetParams { - return &CreateEnterpriseScaleSetParams{ - timeout: timeout, - } -} - -// NewCreateEnterpriseScaleSetParamsWithContext creates a new CreateEnterpriseScaleSetParams object -// with the ability to set a context for a request. -func NewCreateEnterpriseScaleSetParamsWithContext(ctx context.Context) *CreateEnterpriseScaleSetParams { - return &CreateEnterpriseScaleSetParams{ - Context: ctx, - } -} - -// NewCreateEnterpriseScaleSetParamsWithHTTPClient creates a new CreateEnterpriseScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewCreateEnterpriseScaleSetParamsWithHTTPClient(client *http.Client) *CreateEnterpriseScaleSetParams { - return &CreateEnterpriseScaleSetParams{ - HTTPClient: client, - } -} - -/* -CreateEnterpriseScaleSetParams contains all the parameters to send to the API endpoint - - for the create enterprise scale set operation. - - Typically these are written to a http.Request. -*/ -type CreateEnterpriseScaleSetParams struct { - - /* Body. - - Parameters used when creating the enterprise scale set. - */ - Body garm_params.CreateScaleSetParams - - /* EnterpriseID. - - Enterprise ID. - */ - EnterpriseID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the create enterprise scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *CreateEnterpriseScaleSetParams) WithDefaults() *CreateEnterpriseScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the create enterprise scale set params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *CreateEnterpriseScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) WithTimeout(timeout time.Duration) *CreateEnterpriseScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) WithContext(ctx context.Context) *CreateEnterpriseScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) WithHTTPClient(client *http.Client) *CreateEnterpriseScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateEnterpriseScaleSetParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { - o.Body = body -} - -// WithEnterpriseID adds the enterpriseID to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) WithEnterpriseID(enterpriseID string) *CreateEnterpriseScaleSetParams { - o.SetEnterpriseID(enterpriseID) - return o -} - -// SetEnterpriseID adds the enterpriseId to the create enterprise scale set params -func (o *CreateEnterpriseScaleSetParams) SetEnterpriseID(enterpriseID string) { - o.EnterpriseID = enterpriseID -} - -// WriteToRequest writes these params to a swagger request -func (o *CreateEnterpriseScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param enterpriseID - if err := r.SetPathParam("enterpriseID", o.EnterpriseID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/enterprises/create_enterprise_scale_set_responses.go b/client/enterprises/create_enterprise_scale_set_responses.go deleted file mode 100644 index 46107fc3..00000000 --- a/client/enterprises/create_enterprise_scale_set_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package enterprises - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// CreateEnterpriseScaleSetReader is a Reader for the CreateEnterpriseScaleSet structure. 
-type CreateEnterpriseScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *CreateEnterpriseScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewCreateEnterpriseScaleSetOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewCreateEnterpriseScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewCreateEnterpriseScaleSetOK creates a CreateEnterpriseScaleSetOK with default headers values -func NewCreateEnterpriseScaleSetOK() *CreateEnterpriseScaleSetOK { - return &CreateEnterpriseScaleSetOK{} -} - -/* -CreateEnterpriseScaleSetOK describes a response with status code 200, with default header values. - -ScaleSet -*/ -type CreateEnterpriseScaleSetOK struct { - Payload garm_params.ScaleSet -} - -// IsSuccess returns true when this create enterprise scale set o k response has a 2xx status code -func (o *CreateEnterpriseScaleSetOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this create enterprise scale set o k response has a 3xx status code -func (o *CreateEnterpriseScaleSetOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create enterprise scale set o k response has a 4xx status code -func (o *CreateEnterpriseScaleSetOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this create enterprise scale set o k response has a 5xx status code -func (o *CreateEnterpriseScaleSetOK) IsServerError() bool { - return false -} - -// IsCode returns true when this create enterprise scale set o k response a status code equal to that given -func (o *CreateEnterpriseScaleSetOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the create enterprise scale set o k response -func (o *CreateEnterpriseScaleSetOK) Code() int { - return 200 -} - -func (o *CreateEnterpriseScaleSetOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] createEnterpriseScaleSetOK %s", 200, payload) -} - -func (o *CreateEnterpriseScaleSetOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] createEnterpriseScaleSetOK %s", 200, payload) -} - -func (o *CreateEnterpriseScaleSetOK) GetPayload() garm_params.ScaleSet { - return o.Payload -} - -func (o *CreateEnterpriseScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewCreateEnterpriseScaleSetDefault creates a CreateEnterpriseScaleSetDefault with default headers values -func NewCreateEnterpriseScaleSetDefault(code int) *CreateEnterpriseScaleSetDefault { - return &CreateEnterpriseScaleSetDefault{ - _statusCode: code, - } -} - -/* -CreateEnterpriseScaleSetDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type CreateEnterpriseScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this create enterprise scale set default response has a 2xx status code -func (o *CreateEnterpriseScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this create enterprise scale set default response has a 3xx status code -func (o *CreateEnterpriseScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this create enterprise scale set default response has a 4xx status code -func (o *CreateEnterpriseScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this create enterprise scale set default response has a 5xx status code -func (o *CreateEnterpriseScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this create enterprise scale set default response a status code equal to that given -func (o *CreateEnterpriseScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the create enterprise scale set default response -func (o *CreateEnterpriseScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *CreateEnterpriseScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] CreateEnterpriseScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateEnterpriseScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] CreateEnterpriseScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateEnterpriseScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *CreateEnterpriseScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/enterprises/enterprises_client.go b/client/enterprises/enterprises_client.go index 0014ca96..91ab1bff 100644 --- a/client/enterprises/enterprises_client.go +++ b/client/enterprises/enterprises_client.go @@ -58,8 +58,6 @@ type ClientService interface { CreateEnterprisePool(params *CreateEnterprisePoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterprisePoolOK, error) - CreateEnterpriseScaleSet(params *CreateEnterpriseScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterpriseScaleSetOK, error) - DeleteEnterprise(params *DeleteEnterpriseParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteEnterprisePool(params *DeleteEnterprisePoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -72,8 +70,6 @@ type ClientService interface { ListEnterprisePools(params *ListEnterprisePoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterprisePoolsOK, error) - ListEnterpriseScaleSets(params *ListEnterpriseScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterpriseScaleSetsOK, error) - ListEnterprises(params *ListEnterprisesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) 
(*ListEnterprisesOK, error) UpdateEnterprise(params *UpdateEnterpriseParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateEnterpriseOK, error) @@ -159,44 +155,6 @@ func (a *Client) CreateEnterprisePool(params *CreateEnterprisePoolParams, authIn return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -CreateEnterpriseScaleSet creates enterprise pool with the parameters given -*/ -func (a *Client) CreateEnterpriseScaleSet(params *CreateEnterpriseScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterpriseScaleSetOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewCreateEnterpriseScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "CreateEnterpriseScaleSet", - Method: "POST", - PathPattern: "/enterprises/{enterpriseID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &CreateEnterpriseScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*CreateEnterpriseScaleSetOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*CreateEnterpriseScaleSetDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* DeleteEnterprise deletes enterprise by ID */ @@ -413,44 +371,6 @@ func (a *Client) ListEnterprisePools(params *ListEnterprisePoolsParams, authInfo return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -ListEnterpriseScaleSets lists enterprise scale sets -*/ -func (a *Client) ListEnterpriseScaleSets(params *ListEnterpriseScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterpriseScaleSetsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListEnterpriseScaleSetsParams() - } - op := &runtime.ClientOperation{ - ID: "ListEnterpriseScaleSets", - Method: "GET", - PathPattern: "/enterprises/{enterpriseID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListEnterpriseScaleSetsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListEnterpriseScaleSetsOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListEnterpriseScaleSetsDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* ListEnterprises lists all enterprises */ diff --git a/client/enterprises/list_enterprise_scale_sets_parameters.go b/client/enterprises/list_enterprise_scale_sets_parameters.go deleted file mode 100644 index f835717c..00000000 --- 
a/client/enterprises/list_enterprise_scale_sets_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package enterprises - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListEnterpriseScaleSetsParams creates a new ListEnterpriseScaleSetsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListEnterpriseScaleSetsParams() *ListEnterpriseScaleSetsParams { - return &ListEnterpriseScaleSetsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListEnterpriseScaleSetsParamsWithTimeout creates a new ListEnterpriseScaleSetsParams object -// with the ability to set a timeout on a request. -func NewListEnterpriseScaleSetsParamsWithTimeout(timeout time.Duration) *ListEnterpriseScaleSetsParams { - return &ListEnterpriseScaleSetsParams{ - timeout: timeout, - } -} - -// NewListEnterpriseScaleSetsParamsWithContext creates a new ListEnterpriseScaleSetsParams object -// with the ability to set a context for a request. -func NewListEnterpriseScaleSetsParamsWithContext(ctx context.Context) *ListEnterpriseScaleSetsParams { - return &ListEnterpriseScaleSetsParams{ - Context: ctx, - } -} - -// NewListEnterpriseScaleSetsParamsWithHTTPClient creates a new ListEnterpriseScaleSetsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListEnterpriseScaleSetsParamsWithHTTPClient(client *http.Client) *ListEnterpriseScaleSetsParams { - return &ListEnterpriseScaleSetsParams{ - HTTPClient: client, - } -} - -/* -ListEnterpriseScaleSetsParams contains all the parameters to send to the API endpoint - - for the list enterprise scale sets operation. - - Typically these are written to a http.Request. -*/ -type ListEnterpriseScaleSetsParams struct { - - /* EnterpriseID. - - Enterprise ID. - */ - EnterpriseID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list enterprise scale sets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListEnterpriseScaleSetsParams) WithDefaults() *ListEnterpriseScaleSetsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list enterprise scale sets params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *ListEnterpriseScaleSetsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) WithTimeout(timeout time.Duration) *ListEnterpriseScaleSetsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) WithContext(ctx context.Context) *ListEnterpriseScaleSetsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) WithHTTPClient(client *http.Client) *ListEnterpriseScaleSetsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithEnterpriseID adds the enterpriseID to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) WithEnterpriseID(enterpriseID string) *ListEnterpriseScaleSetsParams { - o.SetEnterpriseID(enterpriseID) - return o -} - -// SetEnterpriseID adds the enterpriseId to the list enterprise scale sets params -func (o *ListEnterpriseScaleSetsParams) SetEnterpriseID(enterpriseID string) { - o.EnterpriseID = enterpriseID -} - -// WriteToRequest writes these params to a swagger request -func (o *ListEnterpriseScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param enterpriseID - if err := r.SetPathParam("enterpriseID", o.EnterpriseID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/enterprises/list_enterprise_scale_sets_responses.go b/client/enterprises/list_enterprise_scale_sets_responses.go deleted file mode 100644 index 9c2564c2..00000000 --- a/client/enterprises/list_enterprise_scale_sets_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package enterprises - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListEnterpriseScaleSetsReader is a Reader for the ListEnterpriseScaleSets structure. -type ListEnterpriseScaleSetsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *ListEnterpriseScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListEnterpriseScaleSetsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListEnterpriseScaleSetsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListEnterpriseScaleSetsOK creates a ListEnterpriseScaleSetsOK with default headers values -func NewListEnterpriseScaleSetsOK() *ListEnterpriseScaleSetsOK { - return &ListEnterpriseScaleSetsOK{} -} - -/* -ListEnterpriseScaleSetsOK describes a response with status code 200, with default header values. - -ScaleSets -*/ -type ListEnterpriseScaleSetsOK struct { - Payload garm_params.ScaleSets -} - -// IsSuccess returns true when this list enterprise scale sets o k response has a 2xx status code -func (o *ListEnterpriseScaleSetsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list enterprise scale sets o k response has a 3xx status code -func (o *ListEnterpriseScaleSetsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list enterprise scale sets o k response has a 4xx status code -func (o *ListEnterpriseScaleSetsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list enterprise scale sets o k response has a 5xx status code -func (o *ListEnterpriseScaleSetsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list enterprise scale sets o k response a status code equal to that given -func (o *ListEnterpriseScaleSetsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list enterprise scale sets o k response -func (o *ListEnterpriseScaleSetsOK) Code() int { - return 200 -} - -func (o *ListEnterpriseScaleSetsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] listEnterpriseScaleSetsOK %s", 200, payload) -} - -func (o *ListEnterpriseScaleSetsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] listEnterpriseScaleSetsOK %s", 200, payload) -} - -func (o *ListEnterpriseScaleSetsOK) GetPayload() garm_params.ScaleSets { - return o.Payload -} - -func (o *ListEnterpriseScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListEnterpriseScaleSetsDefault creates a ListEnterpriseScaleSetsDefault with default headers values -func NewListEnterpriseScaleSetsDefault(code int) *ListEnterpriseScaleSetsDefault { - return &ListEnterpriseScaleSetsDefault{ - _statusCode: code, - } -} - -/* -ListEnterpriseScaleSetsDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type ListEnterpriseScaleSetsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list enterprise scale sets default response has a 2xx status code -func (o *ListEnterpriseScaleSetsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list enterprise scale sets default response has a 3xx status code -func (o *ListEnterpriseScaleSetsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list enterprise scale sets default response has a 4xx status code -func (o *ListEnterpriseScaleSetsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list enterprise scale sets default response has a 5xx status code -func (o *ListEnterpriseScaleSetsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list enterprise scale sets default response a status code equal to that given -func (o *ListEnterpriseScaleSetsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list enterprise scale sets default response -func (o *ListEnterpriseScaleSetsDefault) Code() int { - return o._statusCode -} - -func (o *ListEnterpriseScaleSetsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] ListEnterpriseScaleSets default %s", o._statusCode, payload) -} - -func (o *ListEnterpriseScaleSetsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] ListEnterpriseScaleSets default %s", o._statusCode, payload) -} - -func (o *ListEnterpriseScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListEnterpriseScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/enterprises/list_enterprises_parameters.go b/client/enterprises/list_enterprises_parameters.go index 44ba108b..83291c5f 100644 --- a/client/enterprises/list_enterprises_parameters.go +++ b/client/enterprises/list_enterprises_parameters.go @@ -60,19 +60,6 @@ ListEnterprisesParams contains all the parameters to send to the API endpoint Typically these are written to a http.Request. */ type ListEnterprisesParams struct { - - /* Endpoint. - - Exact endpoint name to filter by - */ - Endpoint *string - - /* Name. 
- - Exact enterprise name to filter by - */ - Name *string - timeout time.Duration Context context.Context HTTPClient *http.Client @@ -126,28 +113,6 @@ func (o *ListEnterprisesParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithEndpoint adds the endpoint to the list enterprises params -func (o *ListEnterprisesParams) WithEndpoint(endpoint *string) *ListEnterprisesParams { - o.SetEndpoint(endpoint) - return o -} - -// SetEndpoint adds the endpoint to the list enterprises params -func (o *ListEnterprisesParams) SetEndpoint(endpoint *string) { - o.Endpoint = endpoint -} - -// WithName adds the name to the list enterprises params -func (o *ListEnterprisesParams) WithName(name *string) *ListEnterprisesParams { - o.SetName(name) - return o -} - -// SetName adds the name to the list enterprises params -func (o *ListEnterprisesParams) SetName(name *string) { - o.Name = name -} - // WriteToRequest writes these params to a swagger request func (o *ListEnterprisesParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -156,40 +121,6 @@ func (o *ListEnterprisesParams) WriteToRequest(r runtime.ClientRequest, reg strf } var res []error - if o.Endpoint != nil { - - // query param endpoint - var qrEndpoint string - - if o.Endpoint != nil { - qrEndpoint = *o.Endpoint - } - qEndpoint := qrEndpoint - if qEndpoint != "" { - - if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { - return err - } - } - } - - if o.Name != nil { - - // query param name - var qrName string - - if o.Name != nil { - qrName = *o.Name - } - qName := qrName - if qName != "" { - - if err := r.SetQueryParam("name", qName); err != nil { - return err - } - } - } - if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/client/garm_api_client.go b/client/garm_api_client.go index f5bc51b2..cbc65dfc 100644 --- a/client/garm_api_client.go +++ b/client/garm_api_client.go @@ -24,7 +24,6 @@ import ( "github.com/cloudbase/garm/client/pools" "github.com/cloudbase/garm/client/providers" "github.com/cloudbase/garm/client/repositories" - "github.com/cloudbase/garm/client/scalesets" ) // Default garm API HTTP client. 
@@ -83,7 +82,6 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *GarmAPI { cli.Pools = pools.New(transport, formats) cli.Providers = providers.New(transport, formats) cli.Repositories = repositories.New(transport, formats) - cli.Scalesets = scalesets.New(transport, formats) return cli } @@ -156,8 +154,6 @@ type GarmAPI struct { Repositories repositories.ClientService - Scalesets scalesets.ClientService - Transport runtime.ClientTransport } @@ -178,5 +174,4 @@ func (c *GarmAPI) SetTransport(transport runtime.ClientTransport) { c.Pools.SetTransport(transport) c.Providers.SetTransport(transport) c.Repositories.SetTransport(transport) - c.Scalesets.SetTransport(transport) } diff --git a/client/instances/instances_client.go b/client/instances/instances_client.go index 2c41f919..5b6af6f3 100644 --- a/client/instances/instances_client.go +++ b/client/instances/instances_client.go @@ -62,8 +62,6 @@ type ClientService interface { ListPoolInstances(params *ListPoolInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListPoolInstancesOK, error) - ListScaleSetInstances(params *ListScaleSetInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScaleSetInstancesOK, error) - SetTransport(transport runtime.ClientTransport) } @@ -213,44 +211,6 @@ func (a *Client) ListPoolInstances(params *ListPoolInstancesParams, authInfo run return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -ListScaleSetInstances lists runner instances in a scale set -*/ -func (a *Client) ListScaleSetInstances(params *ListScaleSetInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScaleSetInstancesOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListScaleSetInstancesParams() - } - op := &runtime.ClientOperation{ - ID: "ListScaleSetInstances", - Method: "GET", - PathPattern: "/scalesets/{scalesetID}/instances", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListScaleSetInstancesReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListScaleSetInstancesOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListScaleSetInstancesDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/client/instances/list_scale_set_instances_parameters.go b/client/instances/list_scale_set_instances_parameters.go deleted file mode 100644 index 7b38ef82..00000000 --- a/client/instances/list_scale_set_instances_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package instances - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListScaleSetInstancesParams creates a new ListScaleSetInstancesParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListScaleSetInstancesParams() *ListScaleSetInstancesParams { - return &ListScaleSetInstancesParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListScaleSetInstancesParamsWithTimeout creates a new ListScaleSetInstancesParams object -// with the ability to set a timeout on a request. -func NewListScaleSetInstancesParamsWithTimeout(timeout time.Duration) *ListScaleSetInstancesParams { - return &ListScaleSetInstancesParams{ - timeout: timeout, - } -} - -// NewListScaleSetInstancesParamsWithContext creates a new ListScaleSetInstancesParams object -// with the ability to set a context for a request. -func NewListScaleSetInstancesParamsWithContext(ctx context.Context) *ListScaleSetInstancesParams { - return &ListScaleSetInstancesParams{ - Context: ctx, - } -} - -// NewListScaleSetInstancesParamsWithHTTPClient creates a new ListScaleSetInstancesParams object -// with the ability to set a custom HTTPClient for a request. -func NewListScaleSetInstancesParamsWithHTTPClient(client *http.Client) *ListScaleSetInstancesParams { - return &ListScaleSetInstancesParams{ - HTTPClient: client, - } -} - -/* -ListScaleSetInstancesParams contains all the parameters to send to the API endpoint - - for the list scale set instances operation. - - Typically these are written to a http.Request. -*/ -type ListScaleSetInstancesParams struct { - - /* ScalesetID. - - Runner scale set ID. - */ - ScalesetID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list scale set instances params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListScaleSetInstancesParams) WithDefaults() *ListScaleSetInstancesParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list scale set instances params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *ListScaleSetInstancesParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list scale set instances params -func (o *ListScaleSetInstancesParams) WithTimeout(timeout time.Duration) *ListScaleSetInstancesParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list scale set instances params -func (o *ListScaleSetInstancesParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list scale set instances params -func (o *ListScaleSetInstancesParams) WithContext(ctx context.Context) *ListScaleSetInstancesParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list scale set instances params -func (o *ListScaleSetInstancesParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list scale set instances params -func (o *ListScaleSetInstancesParams) WithHTTPClient(client *http.Client) *ListScaleSetInstancesParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list scale set instances params -func (o *ListScaleSetInstancesParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithScalesetID adds the scalesetID to the list scale set instances params -func (o *ListScaleSetInstancesParams) WithScalesetID(scalesetID string) *ListScaleSetInstancesParams { - o.SetScalesetID(scalesetID) - return o -} - -// SetScalesetID adds the scalesetId to the list scale set instances params -func (o *ListScaleSetInstancesParams) SetScalesetID(scalesetID string) { - o.ScalesetID = scalesetID -} - -// WriteToRequest writes these params to a swagger request -func (o *ListScaleSetInstancesParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param scalesetID - if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/instances/list_scale_set_instances_responses.go b/client/instances/list_scale_set_instances_responses.go deleted file mode 100644 index a966a9e7..00000000 --- a/client/instances/list_scale_set_instances_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package instances - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListScaleSetInstancesReader is a Reader for the ListScaleSetInstances structure. -type ListScaleSetInstancesReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *ListScaleSetInstancesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListScaleSetInstancesOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListScaleSetInstancesDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListScaleSetInstancesOK creates a ListScaleSetInstancesOK with default headers values -func NewListScaleSetInstancesOK() *ListScaleSetInstancesOK { - return &ListScaleSetInstancesOK{} -} - -/* -ListScaleSetInstancesOK describes a response with status code 200, with default header values. - -Instances -*/ -type ListScaleSetInstancesOK struct { - Payload garm_params.Instances -} - -// IsSuccess returns true when this list scale set instances o k response has a 2xx status code -func (o *ListScaleSetInstancesOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list scale set instances o k response has a 3xx status code -func (o *ListScaleSetInstancesOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list scale set instances o k response has a 4xx status code -func (o *ListScaleSetInstancesOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list scale set instances o k response has a 5xx status code -func (o *ListScaleSetInstancesOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list scale set instances o k response a status code equal to that given -func (o *ListScaleSetInstancesOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list scale set instances o k response -func (o *ListScaleSetInstancesOK) Code() int { - return 200 -} - -func (o *ListScaleSetInstancesOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] listScaleSetInstancesOK %s", 200, payload) -} - -func (o *ListScaleSetInstancesOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] listScaleSetInstancesOK %s", 200, payload) -} - -func (o *ListScaleSetInstancesOK) GetPayload() garm_params.Instances { - return o.Payload -} - -func (o *ListScaleSetInstancesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListScaleSetInstancesDefault creates a ListScaleSetInstancesDefault with default headers values -func NewListScaleSetInstancesDefault(code int) *ListScaleSetInstancesDefault { - return &ListScaleSetInstancesDefault{ - _statusCode: code, - } -} - -/* -ListScaleSetInstancesDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type ListScaleSetInstancesDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list scale set instances default response has a 2xx status code -func (o *ListScaleSetInstancesDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list scale set instances default response has a 3xx status code -func (o *ListScaleSetInstancesDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list scale set instances default response has a 4xx status code -func (o *ListScaleSetInstancesDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list scale set instances default response has a 5xx status code -func (o *ListScaleSetInstancesDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list scale set instances default response a status code equal to that given -func (o *ListScaleSetInstancesDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list scale set instances default response -func (o *ListScaleSetInstancesDefault) Code() int { - return o._statusCode -} - -func (o *ListScaleSetInstancesDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] ListScaleSetInstances default %s", o._statusCode, payload) -} - -func (o *ListScaleSetInstancesDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] ListScaleSetInstances default %s", o._statusCode, payload) -} - -func (o *ListScaleSetInstancesDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListScaleSetInstancesDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/organizations/create_org_scale_set_parameters.go b/client/organizations/create_org_scale_set_parameters.go deleted file mode 100644 index 0e222693..00000000 --- a/client/organizations/create_org_scale_set_parameters.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package organizations - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewCreateOrgScaleSetParams creates a new CreateOrgScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewCreateOrgScaleSetParams() *CreateOrgScaleSetParams { - return &CreateOrgScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewCreateOrgScaleSetParamsWithTimeout creates a new CreateOrgScaleSetParams object -// with the ability to set a timeout on a request. 
-func NewCreateOrgScaleSetParamsWithTimeout(timeout time.Duration) *CreateOrgScaleSetParams { - return &CreateOrgScaleSetParams{ - timeout: timeout, - } -} - -// NewCreateOrgScaleSetParamsWithContext creates a new CreateOrgScaleSetParams object -// with the ability to set a context for a request. -func NewCreateOrgScaleSetParamsWithContext(ctx context.Context) *CreateOrgScaleSetParams { - return &CreateOrgScaleSetParams{ - Context: ctx, - } -} - -// NewCreateOrgScaleSetParamsWithHTTPClient creates a new CreateOrgScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewCreateOrgScaleSetParamsWithHTTPClient(client *http.Client) *CreateOrgScaleSetParams { - return &CreateOrgScaleSetParams{ - HTTPClient: client, - } -} - -/* -CreateOrgScaleSetParams contains all the parameters to send to the API endpoint - - for the create org scale set operation. - - Typically these are written to a http.Request. -*/ -type CreateOrgScaleSetParams struct { - - /* Body. - - Parameters used when creating the organization scale set. - */ - Body garm_params.CreateScaleSetParams - - /* OrgID. - - Organization ID. - */ - OrgID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the create org scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *CreateOrgScaleSetParams) WithDefaults() *CreateOrgScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the create org scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *CreateOrgScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the create org scale set params -func (o *CreateOrgScaleSetParams) WithTimeout(timeout time.Duration) *CreateOrgScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the create org scale set params -func (o *CreateOrgScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the create org scale set params -func (o *CreateOrgScaleSetParams) WithContext(ctx context.Context) *CreateOrgScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the create org scale set params -func (o *CreateOrgScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the create org scale set params -func (o *CreateOrgScaleSetParams) WithHTTPClient(client *http.Client) *CreateOrgScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the create org scale set params -func (o *CreateOrgScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the create org scale set params -func (o *CreateOrgScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateOrgScaleSetParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the create org scale set params -func (o *CreateOrgScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { - o.Body = body -} - -// WithOrgID adds the orgID to the create org scale set params -func (o *CreateOrgScaleSetParams) WithOrgID(orgID string) *CreateOrgScaleSetParams { - o.SetOrgID(orgID) - return o -} - -// SetOrgID adds the orgId to the create org scale set params -func (o 
*CreateOrgScaleSetParams) SetOrgID(orgID string) { - o.OrgID = orgID -} - -// WriteToRequest writes these params to a swagger request -func (o *CreateOrgScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param orgID - if err := r.SetPathParam("orgID", o.OrgID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/organizations/create_org_scale_set_responses.go b/client/organizations/create_org_scale_set_responses.go deleted file mode 100644 index 3a91d03f..00000000 --- a/client/organizations/create_org_scale_set_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package organizations - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// CreateOrgScaleSetReader is a Reader for the CreateOrgScaleSet structure. -type CreateOrgScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *CreateOrgScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewCreateOrgScaleSetOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewCreateOrgScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewCreateOrgScaleSetOK creates a CreateOrgScaleSetOK with default headers values -func NewCreateOrgScaleSetOK() *CreateOrgScaleSetOK { - return &CreateOrgScaleSetOK{} -} - -/* -CreateOrgScaleSetOK describes a response with status code 200, with default header values. 
- -ScaleSet -*/ -type CreateOrgScaleSetOK struct { - Payload garm_params.ScaleSet -} - -// IsSuccess returns true when this create org scale set o k response has a 2xx status code -func (o *CreateOrgScaleSetOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this create org scale set o k response has a 3xx status code -func (o *CreateOrgScaleSetOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create org scale set o k response has a 4xx status code -func (o *CreateOrgScaleSetOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this create org scale set o k response has a 5xx status code -func (o *CreateOrgScaleSetOK) IsServerError() bool { - return false -} - -// IsCode returns true when this create org scale set o k response a status code equal to that given -func (o *CreateOrgScaleSetOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the create org scale set o k response -func (o *CreateOrgScaleSetOK) Code() int { - return 200 -} - -func (o *CreateOrgScaleSetOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] createOrgScaleSetOK %s", 200, payload) -} - -func (o *CreateOrgScaleSetOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] createOrgScaleSetOK %s", 200, payload) -} - -func (o *CreateOrgScaleSetOK) GetPayload() garm_params.ScaleSet { - return o.Payload -} - -func (o *CreateOrgScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewCreateOrgScaleSetDefault creates a CreateOrgScaleSetDefault with default headers values -func NewCreateOrgScaleSetDefault(code int) *CreateOrgScaleSetDefault { - return &CreateOrgScaleSetDefault{ - _statusCode: code, - } -} - -/* -CreateOrgScaleSetDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type CreateOrgScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this create org scale set default response has a 2xx status code -func (o *CreateOrgScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this create org scale set default response has a 3xx status code -func (o *CreateOrgScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this create org scale set default response has a 4xx status code -func (o *CreateOrgScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this create org scale set default response has a 5xx status code -func (o *CreateOrgScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this create org scale set default response a status code equal to that given -func (o *CreateOrgScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the create org scale set default response -func (o *CreateOrgScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *CreateOrgScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] CreateOrgScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateOrgScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] CreateOrgScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateOrgScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *CreateOrgScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/organizations/list_org_scale_sets_parameters.go b/client/organizations/list_org_scale_sets_parameters.go deleted file mode 100644 index 711ec788..00000000 --- a/client/organizations/list_org_scale_sets_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package organizations - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListOrgScaleSetsParams creates a new ListOrgScaleSetsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListOrgScaleSetsParams() *ListOrgScaleSetsParams { - return &ListOrgScaleSetsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListOrgScaleSetsParamsWithTimeout creates a new ListOrgScaleSetsParams object -// with the ability to set a timeout on a request. 
-func NewListOrgScaleSetsParamsWithTimeout(timeout time.Duration) *ListOrgScaleSetsParams { - return &ListOrgScaleSetsParams{ - timeout: timeout, - } -} - -// NewListOrgScaleSetsParamsWithContext creates a new ListOrgScaleSetsParams object -// with the ability to set a context for a request. -func NewListOrgScaleSetsParamsWithContext(ctx context.Context) *ListOrgScaleSetsParams { - return &ListOrgScaleSetsParams{ - Context: ctx, - } -} - -// NewListOrgScaleSetsParamsWithHTTPClient creates a new ListOrgScaleSetsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListOrgScaleSetsParamsWithHTTPClient(client *http.Client) *ListOrgScaleSetsParams { - return &ListOrgScaleSetsParams{ - HTTPClient: client, - } -} - -/* -ListOrgScaleSetsParams contains all the parameters to send to the API endpoint - - for the list org scale sets operation. - - Typically these are written to a http.Request. -*/ -type ListOrgScaleSetsParams struct { - - /* OrgID. - - Organization ID. - */ - OrgID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list org scale sets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListOrgScaleSetsParams) WithDefaults() *ListOrgScaleSetsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list org scale sets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListOrgScaleSetsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list org scale sets params -func (o *ListOrgScaleSetsParams) WithTimeout(timeout time.Duration) *ListOrgScaleSetsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list org scale sets params -func (o *ListOrgScaleSetsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list org scale sets params -func (o *ListOrgScaleSetsParams) WithContext(ctx context.Context) *ListOrgScaleSetsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list org scale sets params -func (o *ListOrgScaleSetsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list org scale sets params -func (o *ListOrgScaleSetsParams) WithHTTPClient(client *http.Client) *ListOrgScaleSetsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list org scale sets params -func (o *ListOrgScaleSetsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithOrgID adds the orgID to the list org scale sets params -func (o *ListOrgScaleSetsParams) WithOrgID(orgID string) *ListOrgScaleSetsParams { - o.SetOrgID(orgID) - return o -} - -// SetOrgID adds the orgId to the list org scale sets params -func (o *ListOrgScaleSetsParams) SetOrgID(orgID string) { - o.OrgID = orgID -} - -// WriteToRequest writes these params to a swagger request -func (o *ListOrgScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param orgID - if err := r.SetPathParam("orgID", o.OrgID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } - return nil -} diff --git a/client/organizations/list_org_scale_sets_responses.go b/client/organizations/list_org_scale_sets_responses.go deleted file mode 100644 index 0b470fa1..00000000 --- a/client/organizations/list_org_scale_sets_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package organizations - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListOrgScaleSetsReader is a Reader for the ListOrgScaleSets structure. -type ListOrgScaleSetsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *ListOrgScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListOrgScaleSetsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListOrgScaleSetsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListOrgScaleSetsOK creates a ListOrgScaleSetsOK with default headers values -func NewListOrgScaleSetsOK() *ListOrgScaleSetsOK { - return &ListOrgScaleSetsOK{} -} - -/* -ListOrgScaleSetsOK describes a response with status code 200, with default header values. - -ScaleSets -*/ -type ListOrgScaleSetsOK struct { - Payload garm_params.ScaleSets -} - -// IsSuccess returns true when this list org scale sets o k response has a 2xx status code -func (o *ListOrgScaleSetsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list org scale sets o k response has a 3xx status code -func (o *ListOrgScaleSetsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list org scale sets o k response has a 4xx status code -func (o *ListOrgScaleSetsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list org scale sets o k response has a 5xx status code -func (o *ListOrgScaleSetsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list org scale sets o k response a status code equal to that given -func (o *ListOrgScaleSetsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list org scale sets o k response -func (o *ListOrgScaleSetsOK) Code() int { - return 200 -} - -func (o *ListOrgScaleSetsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] listOrgScaleSetsOK %s", 200, payload) -} - -func (o *ListOrgScaleSetsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] listOrgScaleSetsOK %s", 200, payload) -} - -func (o *ListOrgScaleSetsOK) GetPayload() garm_params.ScaleSets { - return o.Payload -} - -func (o *ListOrgScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && 
err != io.EOF { - return err - } - - return nil -} - -// NewListOrgScaleSetsDefault creates a ListOrgScaleSetsDefault with default headers values -func NewListOrgScaleSetsDefault(code int) *ListOrgScaleSetsDefault { - return &ListOrgScaleSetsDefault{ - _statusCode: code, - } -} - -/* -ListOrgScaleSetsDefault describes a response with status code -1, with default header values. - -APIErrorResponse -*/ -type ListOrgScaleSetsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list org scale sets default response has a 2xx status code -func (o *ListOrgScaleSetsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list org scale sets default response has a 3xx status code -func (o *ListOrgScaleSetsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list org scale sets default response has a 4xx status code -func (o *ListOrgScaleSetsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list org scale sets default response has a 5xx status code -func (o *ListOrgScaleSetsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list org scale sets default response a status code equal to that given -func (o *ListOrgScaleSetsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list org scale sets default response -func (o *ListOrgScaleSetsDefault) Code() int { - return o._statusCode -} - -func (o *ListOrgScaleSetsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] ListOrgScaleSets default %s", o._statusCode, payload) -} - -func (o *ListOrgScaleSetsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] ListOrgScaleSets default %s", o._statusCode, payload) -} - -func (o *ListOrgScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListOrgScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/organizations/list_orgs_parameters.go b/client/organizations/list_orgs_parameters.go index af4c19c8..1441722f 100644 --- a/client/organizations/list_orgs_parameters.go +++ b/client/organizations/list_orgs_parameters.go @@ -60,19 +60,6 @@ ListOrgsParams contains all the parameters to send to the API endpoint Typically these are written to a http.Request. */ type ListOrgsParams struct { - - /* Endpoint. - - Exact endpoint name to filter by - */ - Endpoint *string - - /* Name. 
- - Exact organization name to filter by - */ - Name *string - timeout time.Duration Context context.Context HTTPClient *http.Client @@ -126,28 +113,6 @@ func (o *ListOrgsParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithEndpoint adds the endpoint to the list orgs params -func (o *ListOrgsParams) WithEndpoint(endpoint *string) *ListOrgsParams { - o.SetEndpoint(endpoint) - return o -} - -// SetEndpoint adds the endpoint to the list orgs params -func (o *ListOrgsParams) SetEndpoint(endpoint *string) { - o.Endpoint = endpoint -} - -// WithName adds the name to the list orgs params -func (o *ListOrgsParams) WithName(name *string) *ListOrgsParams { - o.SetName(name) - return o -} - -// SetName adds the name to the list orgs params -func (o *ListOrgsParams) SetName(name *string) { - o.Name = name -} - // WriteToRequest writes these params to a swagger request func (o *ListOrgsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -156,40 +121,6 @@ func (o *ListOrgsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Regi } var res []error - if o.Endpoint != nil { - - // query param endpoint - var qrEndpoint string - - if o.Endpoint != nil { - qrEndpoint = *o.Endpoint - } - qEndpoint := qrEndpoint - if qEndpoint != "" { - - if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { - return err - } - } - } - - if o.Name != nil { - - // query param name - var qrName string - - if o.Name != nil { - qrName = *o.Name - } - qName := qrName - if qName != "" { - - if err := r.SetQueryParam("name", qName); err != nil { - return err - } - } - } - if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/client/organizations/organizations_client.go b/client/organizations/organizations_client.go index cd3e1211..5ce5d647 100644 --- a/client/organizations/organizations_client.go +++ b/client/organizations/organizations_client.go @@ -58,8 +58,6 @@ type ClientService interface { CreateOrgPool(params *CreateOrgPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgPoolOK, error) - CreateOrgScaleSet(params *CreateOrgScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgScaleSetOK, error) - DeleteOrg(params *DeleteOrgParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteOrgPool(params *DeleteOrgPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -76,8 +74,6 @@ type ClientService interface { ListOrgPools(params *ListOrgPoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgPoolsOK, error) - ListOrgScaleSets(params *ListOrgScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgScaleSetsOK, error) - ListOrgs(params *ListOrgsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgsOK, error) UninstallOrgWebhook(params *UninstallOrgWebhookParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -165,44 +161,6 @@ func (a *Client) CreateOrgPool(params *CreateOrgPoolParams, authInfo runtime.Cli return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -CreateOrgScaleSet creates organization scale set with the parameters given -*/ -func (a *Client) CreateOrgScaleSet(params *CreateOrgScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgScaleSetOK, error) { - // TODO: Validate the params 
before sending - if params == nil { - params = NewCreateOrgScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "CreateOrgScaleSet", - Method: "POST", - PathPattern: "/organizations/{orgID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &CreateOrgScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*CreateOrgScaleSetOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*CreateOrgScaleSetDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* DeleteOrg deletes organization by ID */ @@ -497,44 +455,6 @@ func (a *Client) ListOrgPools(params *ListOrgPoolsParams, authInfo runtime.Clien return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -ListOrgScaleSets lists organization scale sets -*/ -func (a *Client) ListOrgScaleSets(params *ListOrgScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgScaleSetsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListOrgScaleSetsParams() - } - op := &runtime.ClientOperation{ - ID: "ListOrgScaleSets", - Method: "GET", - PathPattern: "/organizations/{orgID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListOrgScaleSetsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListOrgScaleSetsOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListOrgScaleSetsDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* ListOrgs lists organizations */ diff --git a/client/repositories/create_repo_scale_set_parameters.go b/client/repositories/create_repo_scale_set_parameters.go deleted file mode 100644 index 9b8784dc..00000000 --- a/client/repositories/create_repo_scale_set_parameters.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package repositories - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewCreateRepoScaleSetParams creates a new CreateRepoScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. 
-func NewCreateRepoScaleSetParams() *CreateRepoScaleSetParams { - return &CreateRepoScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewCreateRepoScaleSetParamsWithTimeout creates a new CreateRepoScaleSetParams object -// with the ability to set a timeout on a request. -func NewCreateRepoScaleSetParamsWithTimeout(timeout time.Duration) *CreateRepoScaleSetParams { - return &CreateRepoScaleSetParams{ - timeout: timeout, - } -} - -// NewCreateRepoScaleSetParamsWithContext creates a new CreateRepoScaleSetParams object -// with the ability to set a context for a request. -func NewCreateRepoScaleSetParamsWithContext(ctx context.Context) *CreateRepoScaleSetParams { - return &CreateRepoScaleSetParams{ - Context: ctx, - } -} - -// NewCreateRepoScaleSetParamsWithHTTPClient creates a new CreateRepoScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewCreateRepoScaleSetParamsWithHTTPClient(client *http.Client) *CreateRepoScaleSetParams { - return &CreateRepoScaleSetParams{ - HTTPClient: client, - } -} - -/* -CreateRepoScaleSetParams contains all the parameters to send to the API endpoint - - for the create repo scale set operation. - - Typically these are written to a http.Request. -*/ -type CreateRepoScaleSetParams struct { - - /* Body. - - Parameters used when creating the repository scale set. - */ - Body garm_params.CreateScaleSetParams - - /* RepoID. - - Repository ID. - */ - RepoID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the create repo scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *CreateRepoScaleSetParams) WithDefaults() *CreateRepoScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the create repo scale set params (not the query body). -// -// All values with no default are reset to their zero value. 
-func (o *CreateRepoScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the create repo scale set params -func (o *CreateRepoScaleSetParams) WithTimeout(timeout time.Duration) *CreateRepoScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the create repo scale set params -func (o *CreateRepoScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the create repo scale set params -func (o *CreateRepoScaleSetParams) WithContext(ctx context.Context) *CreateRepoScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the create repo scale set params -func (o *CreateRepoScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the create repo scale set params -func (o *CreateRepoScaleSetParams) WithHTTPClient(client *http.Client) *CreateRepoScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the create repo scale set params -func (o *CreateRepoScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the create repo scale set params -func (o *CreateRepoScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateRepoScaleSetParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the create repo scale set params -func (o *CreateRepoScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { - o.Body = body -} - -// WithRepoID adds the repoID to the create repo scale set params -func (o *CreateRepoScaleSetParams) WithRepoID(repoID string) *CreateRepoScaleSetParams { - o.SetRepoID(repoID) - return o -} - -// SetRepoID adds the repoId to the create repo scale set params -func (o *CreateRepoScaleSetParams) SetRepoID(repoID string) { - o.RepoID = repoID -} - -// WriteToRequest writes these params to a swagger request -func (o *CreateRepoScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param repoID - if err := r.SetPathParam("repoID", o.RepoID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/repositories/create_repo_scale_set_responses.go b/client/repositories/create_repo_scale_set_responses.go deleted file mode 100644 index 4d02d5c1..00000000 --- a/client/repositories/create_repo_scale_set_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package repositories - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// CreateRepoScaleSetReader is a Reader for the CreateRepoScaleSet structure. -type CreateRepoScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *CreateRepoScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewCreateRepoScaleSetOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewCreateRepoScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewCreateRepoScaleSetOK creates a CreateRepoScaleSetOK with default headers values -func NewCreateRepoScaleSetOK() *CreateRepoScaleSetOK { - return &CreateRepoScaleSetOK{} -} - -/* -CreateRepoScaleSetOK describes a response with status code 200, with default header values. - -ScaleSet -*/ -type CreateRepoScaleSetOK struct { - Payload garm_params.ScaleSet -} - -// IsSuccess returns true when this create repo scale set o k response has a 2xx status code -func (o *CreateRepoScaleSetOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this create repo scale set o k response has a 3xx status code -func (o *CreateRepoScaleSetOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this create repo scale set o k response has a 4xx status code -func (o *CreateRepoScaleSetOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this create repo scale set o k response has a 5xx status code -func (o *CreateRepoScaleSetOK) IsServerError() bool { - return false -} - -// IsCode returns true when this create repo scale set o k response a status code equal to that given -func (o *CreateRepoScaleSetOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the create repo scale set o k response -func (o *CreateRepoScaleSetOK) Code() int { - return 200 -} - -func (o *CreateRepoScaleSetOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] createRepoScaleSetOK %s", 200, payload) -} - -func (o *CreateRepoScaleSetOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] createRepoScaleSetOK %s", 200, payload) -} - -func (o *CreateRepoScaleSetOK) GetPayload() garm_params.ScaleSet { - return o.Payload -} - -func (o *CreateRepoScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewCreateRepoScaleSetDefault creates a CreateRepoScaleSetDefault with default headers values -func NewCreateRepoScaleSetDefault(code int) *CreateRepoScaleSetDefault { - return &CreateRepoScaleSetDefault{ - _statusCode: code, - } -} - -/* -CreateRepoScaleSetDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type CreateRepoScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this create repo scale set default response has a 2xx status code -func (o *CreateRepoScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this create repo scale set default response has a 3xx status code -func (o *CreateRepoScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this create repo scale set default response has a 4xx status code -func (o *CreateRepoScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this create repo scale set default response has a 5xx status code -func (o *CreateRepoScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this create repo scale set default response a status code equal to that given -func (o *CreateRepoScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the create repo scale set default response -func (o *CreateRepoScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *CreateRepoScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] CreateRepoScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateRepoScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] CreateRepoScaleSet default %s", o._statusCode, payload) -} - -func (o *CreateRepoScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *CreateRepoScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/repositories/list_repo_scale_sets_parameters.go b/client/repositories/list_repo_scale_sets_parameters.go deleted file mode 100644 index 2582c498..00000000 --- a/client/repositories/list_repo_scale_sets_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package repositories - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListRepoScaleSetsParams creates a new ListRepoScaleSetsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListRepoScaleSetsParams() *ListRepoScaleSetsParams { - return &ListRepoScaleSetsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListRepoScaleSetsParamsWithTimeout creates a new ListRepoScaleSetsParams object -// with the ability to set a timeout on a request. 
-func NewListRepoScaleSetsParamsWithTimeout(timeout time.Duration) *ListRepoScaleSetsParams { - return &ListRepoScaleSetsParams{ - timeout: timeout, - } -} - -// NewListRepoScaleSetsParamsWithContext creates a new ListRepoScaleSetsParams object -// with the ability to set a context for a request. -func NewListRepoScaleSetsParamsWithContext(ctx context.Context) *ListRepoScaleSetsParams { - return &ListRepoScaleSetsParams{ - Context: ctx, - } -} - -// NewListRepoScaleSetsParamsWithHTTPClient creates a new ListRepoScaleSetsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListRepoScaleSetsParamsWithHTTPClient(client *http.Client) *ListRepoScaleSetsParams { - return &ListRepoScaleSetsParams{ - HTTPClient: client, - } -} - -/* -ListRepoScaleSetsParams contains all the parameters to send to the API endpoint - - for the list repo scale sets operation. - - Typically these are written to a http.Request. -*/ -type ListRepoScaleSetsParams struct { - - /* RepoID. - - Repository ID. - */ - RepoID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list repo scale sets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListRepoScaleSetsParams) WithDefaults() *ListRepoScaleSetsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list repo scale sets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListRepoScaleSetsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list repo scale sets params -func (o *ListRepoScaleSetsParams) WithTimeout(timeout time.Duration) *ListRepoScaleSetsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list repo scale sets params -func (o *ListRepoScaleSetsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list repo scale sets params -func (o *ListRepoScaleSetsParams) WithContext(ctx context.Context) *ListRepoScaleSetsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list repo scale sets params -func (o *ListRepoScaleSetsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list repo scale sets params -func (o *ListRepoScaleSetsParams) WithHTTPClient(client *http.Client) *ListRepoScaleSetsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list repo scale sets params -func (o *ListRepoScaleSetsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithRepoID adds the repoID to the list repo scale sets params -func (o *ListRepoScaleSetsParams) WithRepoID(repoID string) *ListRepoScaleSetsParams { - o.SetRepoID(repoID) - return o -} - -// SetRepoID adds the repoId to the list repo scale sets params -func (o *ListRepoScaleSetsParams) SetRepoID(repoID string) { - o.RepoID = repoID -} - -// WriteToRequest writes these params to a swagger request -func (o *ListRepoScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param repoID - if err := r.SetPathParam("repoID", o.RepoID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } - return nil -} diff --git a/client/repositories/list_repo_scale_sets_responses.go b/client/repositories/list_repo_scale_sets_responses.go deleted file mode 100644 index 4e2d98a2..00000000 --- a/client/repositories/list_repo_scale_sets_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package repositories - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListRepoScaleSetsReader is a Reader for the ListRepoScaleSets structure. -type ListRepoScaleSetsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *ListRepoScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListRepoScaleSetsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListRepoScaleSetsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListRepoScaleSetsOK creates a ListRepoScaleSetsOK with default headers values -func NewListRepoScaleSetsOK() *ListRepoScaleSetsOK { - return &ListRepoScaleSetsOK{} -} - -/* -ListRepoScaleSetsOK describes a response with status code 200, with default header values. 
- -ScaleSets -*/ -type ListRepoScaleSetsOK struct { - Payload garm_params.ScaleSets -} - -// IsSuccess returns true when this list repo scale sets o k response has a 2xx status code -func (o *ListRepoScaleSetsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list repo scale sets o k response has a 3xx status code -func (o *ListRepoScaleSetsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list repo scale sets o k response has a 4xx status code -func (o *ListRepoScaleSetsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list repo scale sets o k response has a 5xx status code -func (o *ListRepoScaleSetsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list repo scale sets o k response a status code equal to that given -func (o *ListRepoScaleSetsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list repo scale sets o k response -func (o *ListRepoScaleSetsOK) Code() int { - return 200 -} - -func (o *ListRepoScaleSetsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] listRepoScaleSetsOK %s", 200, payload) -} - -func (o *ListRepoScaleSetsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] listRepoScaleSetsOK %s", 200, payload) -} - -func (o *ListRepoScaleSetsOK) GetPayload() garm_params.ScaleSets { - return o.Payload -} - -func (o *ListRepoScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListRepoScaleSetsDefault creates a ListRepoScaleSetsDefault with default headers values -func NewListRepoScaleSetsDefault(code int) *ListRepoScaleSetsDefault { - return &ListRepoScaleSetsDefault{ - _statusCode: code, - } -} - -/* -ListRepoScaleSetsDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type ListRepoScaleSetsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list repo scale sets default response has a 2xx status code -func (o *ListRepoScaleSetsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list repo scale sets default response has a 3xx status code -func (o *ListRepoScaleSetsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list repo scale sets default response has a 4xx status code -func (o *ListRepoScaleSetsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list repo scale sets default response has a 5xx status code -func (o *ListRepoScaleSetsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list repo scale sets default response a status code equal to that given -func (o *ListRepoScaleSetsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list repo scale sets default response -func (o *ListRepoScaleSetsDefault) Code() int { - return o._statusCode -} - -func (o *ListRepoScaleSetsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] ListRepoScaleSets default %s", o._statusCode, payload) -} - -func (o *ListRepoScaleSetsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] ListRepoScaleSets default %s", o._statusCode, payload) -} - -func (o *ListRepoScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListRepoScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/repositories/list_repos_parameters.go b/client/repositories/list_repos_parameters.go index 9998a1ba..f4e17d79 100644 --- a/client/repositories/list_repos_parameters.go +++ b/client/repositories/list_repos_parameters.go @@ -60,25 +60,6 @@ ListReposParams contains all the parameters to send to the API endpoint Typically these are written to a http.Request. */ type ListReposParams struct { - - /* Endpoint. - - Exact endpoint name to filter by - */ - Endpoint *string - - /* Name. - - Exact repository name to filter by - */ - Name *string - - /* Owner. 
- - Exact owner name to filter by - */ - Owner *string - timeout time.Duration Context context.Context HTTPClient *http.Client @@ -132,39 +113,6 @@ func (o *ListReposParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithEndpoint adds the endpoint to the list repos params -func (o *ListReposParams) WithEndpoint(endpoint *string) *ListReposParams { - o.SetEndpoint(endpoint) - return o -} - -// SetEndpoint adds the endpoint to the list repos params -func (o *ListReposParams) SetEndpoint(endpoint *string) { - o.Endpoint = endpoint -} - -// WithName adds the name to the list repos params -func (o *ListReposParams) WithName(name *string) *ListReposParams { - o.SetName(name) - return o -} - -// SetName adds the name to the list repos params -func (o *ListReposParams) SetName(name *string) { - o.Name = name -} - -// WithOwner adds the owner to the list repos params -func (o *ListReposParams) WithOwner(owner *string) *ListReposParams { - o.SetOwner(owner) - return o -} - -// SetOwner adds the owner to the list repos params -func (o *ListReposParams) SetOwner(owner *string) { - o.Owner = owner -} - // WriteToRequest writes these params to a swagger request func (o *ListReposParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -173,57 +121,6 @@ func (o *ListReposParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Reg } var res []error - if o.Endpoint != nil { - - // query param endpoint - var qrEndpoint string - - if o.Endpoint != nil { - qrEndpoint = *o.Endpoint - } - qEndpoint := qrEndpoint - if qEndpoint != "" { - - if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { - return err - } - } - } - - if o.Name != nil { - - // query param name - var qrName string - - if o.Name != nil { - qrName = *o.Name - } - qName := qrName - if qName != "" { - - if err := r.SetQueryParam("name", qName); err != nil { - return err - } - } - } - - if o.Owner != nil { - - // query param owner - var qrOwner string - - if o.Owner != nil { - qrOwner = *o.Owner - } - qOwner := qrOwner - if qOwner != "" { - - if err := r.SetQueryParam("owner", qOwner); err != nil { - return err - } - } - } - if len(res) > 0 { return errors.CompositeValidationError(res...) 
} diff --git a/client/repositories/repositories_client.go b/client/repositories/repositories_client.go index 017bf0f8..56a0a684 100644 --- a/client/repositories/repositories_client.go +++ b/client/repositories/repositories_client.go @@ -58,8 +58,6 @@ type ClientService interface { CreateRepoPool(params *CreateRepoPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoPoolOK, error) - CreateRepoScaleSet(params *CreateRepoScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoScaleSetOK, error) - DeleteRepo(params *DeleteRepoParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteRepoPool(params *DeleteRepoPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -76,8 +74,6 @@ type ClientService interface { ListRepoPools(params *ListRepoPoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoPoolsOK, error) - ListRepoScaleSets(params *ListRepoScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoScaleSetsOK, error) - ListRepos(params *ListReposParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListReposOK, error) UninstallRepoWebhook(params *UninstallRepoWebhookParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -165,44 +161,6 @@ func (a *Client) CreateRepoPool(params *CreateRepoPoolParams, authInfo runtime.C return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -CreateRepoScaleSet creates repository scale set with the parameters given -*/ -func (a *Client) CreateRepoScaleSet(params *CreateRepoScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoScaleSetOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewCreateRepoScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "CreateRepoScaleSet", - Method: "POST", - PathPattern: "/repositories/{repoID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &CreateRepoScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*CreateRepoScaleSetOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*CreateRepoScaleSetDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* DeleteRepo deletes repository by ID */ @@ -497,44 +455,6 @@ func (a *Client) ListRepoPools(params *ListRepoPoolsParams, authInfo runtime.Cli return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } -/* -ListRepoScaleSets lists repository scale sets -*/ -func (a *Client) ListRepoScaleSets(params *ListRepoScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoScaleSetsOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewListRepoScaleSetsParams() - } - op := &runtime.ClientOperation{ - ID: "ListRepoScaleSets", - Method: 
"GET", - PathPattern: "/repositories/{repoID}/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListRepoScaleSetsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListRepoScaleSetsOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListRepoScaleSetsDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - /* ListRepos lists repositories */ diff --git a/client/scalesets/delete_scale_set_parameters.go b/client/scalesets/delete_scale_set_parameters.go deleted file mode 100644 index 640f95a8..00000000 --- a/client/scalesets/delete_scale_set_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewDeleteScaleSetParams creates a new DeleteScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewDeleteScaleSetParams() *DeleteScaleSetParams { - return &DeleteScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewDeleteScaleSetParamsWithTimeout creates a new DeleteScaleSetParams object -// with the ability to set a timeout on a request. -func NewDeleteScaleSetParamsWithTimeout(timeout time.Duration) *DeleteScaleSetParams { - return &DeleteScaleSetParams{ - timeout: timeout, - } -} - -// NewDeleteScaleSetParamsWithContext creates a new DeleteScaleSetParams object -// with the ability to set a context for a request. -func NewDeleteScaleSetParamsWithContext(ctx context.Context) *DeleteScaleSetParams { - return &DeleteScaleSetParams{ - Context: ctx, - } -} - -// NewDeleteScaleSetParamsWithHTTPClient creates a new DeleteScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewDeleteScaleSetParamsWithHTTPClient(client *http.Client) *DeleteScaleSetParams { - return &DeleteScaleSetParams{ - HTTPClient: client, - } -} - -/* -DeleteScaleSetParams contains all the parameters to send to the API endpoint - - for the delete scale set operation. - - Typically these are written to a http.Request. -*/ -type DeleteScaleSetParams struct { - - /* ScalesetID. - - ID of the scale set to delete. - */ - ScalesetID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the delete scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *DeleteScaleSetParams) WithDefaults() *DeleteScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the delete scale set params (not the query body). 
-// -// All values with no default are reset to their zero value. -func (o *DeleteScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the delete scale set params -func (o *DeleteScaleSetParams) WithTimeout(timeout time.Duration) *DeleteScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the delete scale set params -func (o *DeleteScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the delete scale set params -func (o *DeleteScaleSetParams) WithContext(ctx context.Context) *DeleteScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the delete scale set params -func (o *DeleteScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the delete scale set params -func (o *DeleteScaleSetParams) WithHTTPClient(client *http.Client) *DeleteScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the delete scale set params -func (o *DeleteScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithScalesetID adds the scalesetID to the delete scale set params -func (o *DeleteScaleSetParams) WithScalesetID(scalesetID string) *DeleteScaleSetParams { - o.SetScalesetID(scalesetID) - return o -} - -// SetScalesetID adds the scalesetId to the delete scale set params -func (o *DeleteScaleSetParams) SetScalesetID(scalesetID string) { - o.ScalesetID = scalesetID -} - -// WriteToRequest writes these params to a swagger request -func (o *DeleteScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param scalesetID - if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/scalesets/delete_scale_set_responses.go b/client/scalesets/delete_scale_set_responses.go deleted file mode 100644 index dd0f7334..00000000 --- a/client/scalesets/delete_scale_set_responses.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" -) - -// DeleteScaleSetReader is a Reader for the DeleteScaleSet structure. -type DeleteScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *DeleteScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - result := NewDeleteScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result -} - -// NewDeleteScaleSetDefault creates a DeleteScaleSetDefault with default headers values -func NewDeleteScaleSetDefault(code int) *DeleteScaleSetDefault { - return &DeleteScaleSetDefault{ - _statusCode: code, - } -} - -/* -DeleteScaleSetDefault describes a response with status code -1, with default header values. - -APIErrorResponse -*/ -type DeleteScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this delete scale set default response has a 2xx status code -func (o *DeleteScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this delete scale set default response has a 3xx status code -func (o *DeleteScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this delete scale set default response has a 4xx status code -func (o *DeleteScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this delete scale set default response has a 5xx status code -func (o *DeleteScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this delete scale set default response a status code equal to that given -func (o *DeleteScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the delete scale set default response -func (o *DeleteScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *DeleteScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /scalesets/{scalesetID}][%d] DeleteScaleSet default %s", o._statusCode, payload) -} - -func (o *DeleteScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[DELETE /scalesets/{scalesetID}][%d] DeleteScaleSet default %s", o._statusCode, payload) -} - -func (o *DeleteScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *DeleteScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/scalesets/get_scale_set_parameters.go b/client/scalesets/get_scale_set_parameters.go deleted file mode 100644 index 9e31b46e..00000000 --- a/client/scalesets/get_scale_set_parameters.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewGetScaleSetParams creates a new GetScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. 
-// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewGetScaleSetParams() *GetScaleSetParams { - return &GetScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewGetScaleSetParamsWithTimeout creates a new GetScaleSetParams object -// with the ability to set a timeout on a request. -func NewGetScaleSetParamsWithTimeout(timeout time.Duration) *GetScaleSetParams { - return &GetScaleSetParams{ - timeout: timeout, - } -} - -// NewGetScaleSetParamsWithContext creates a new GetScaleSetParams object -// with the ability to set a context for a request. -func NewGetScaleSetParamsWithContext(ctx context.Context) *GetScaleSetParams { - return &GetScaleSetParams{ - Context: ctx, - } -} - -// NewGetScaleSetParamsWithHTTPClient creates a new GetScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewGetScaleSetParamsWithHTTPClient(client *http.Client) *GetScaleSetParams { - return &GetScaleSetParams{ - HTTPClient: client, - } -} - -/* -GetScaleSetParams contains all the parameters to send to the API endpoint - - for the get scale set operation. - - Typically these are written to a http.Request. -*/ -type GetScaleSetParams struct { - - /* ScalesetID. - - ID of the scale set to fetch. - */ - ScalesetID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the get scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *GetScaleSetParams) WithDefaults() *GetScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the get scale set params (not the query body). -// -// All values with no default are reset to their zero value. 
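Editor's note: the With*/Set* pairs in these generated files are mechanical fluent-builder plumbing that go-swagger emits for every operation. A minimal sketch of how a caller assembled one of these params objects before this removal; only names visible in this diff are used, and the final print is just to keep the snippet self-contained:

package main

import (
	"fmt"
	"time"

	"github.com/cloudbase/garm/client/scalesets"
)

func main() {
	// Each With* helper simply calls the matching Set* method and
	// returns the receiver, so the builders chain.
	p := scalesets.NewGetScaleSetParams().
		WithScalesetID("5").
		WithTimeout(30 * time.Second)

	fmt.Println(p.ScalesetID) // "5"
}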
-func (o *GetScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the get scale set params -func (o *GetScaleSetParams) WithTimeout(timeout time.Duration) *GetScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the get scale set params -func (o *GetScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the get scale set params -func (o *GetScaleSetParams) WithContext(ctx context.Context) *GetScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the get scale set params -func (o *GetScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the get scale set params -func (o *GetScaleSetParams) WithHTTPClient(client *http.Client) *GetScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the get scale set params -func (o *GetScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithScalesetID adds the scalesetID to the get scale set params -func (o *GetScaleSetParams) WithScalesetID(scalesetID string) *GetScaleSetParams { - o.SetScalesetID(scalesetID) - return o -} - -// SetScalesetID adds the scalesetId to the get scale set params -func (o *GetScaleSetParams) SetScalesetID(scalesetID string) { - o.ScalesetID = scalesetID -} - -// WriteToRequest writes these params to a swagger request -func (o *GetScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - // path param scalesetID - if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/scalesets/get_scale_set_responses.go b/client/scalesets/get_scale_set_responses.go deleted file mode 100644 index 5b30e16f..00000000 --- a/client/scalesets/get_scale_set_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// GetScaleSetReader is a Reader for the GetScaleSet structure. -type GetScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
-func (o *GetScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewGetScaleSetOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewGetScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewGetScaleSetOK creates a GetScaleSetOK with default headers values -func NewGetScaleSetOK() *GetScaleSetOK { - return &GetScaleSetOK{} -} - -/* -GetScaleSetOK describes a response with status code 200, with default header values. - -ScaleSet -*/ -type GetScaleSetOK struct { - Payload garm_params.ScaleSet -} - -// IsSuccess returns true when this get scale set o k response has a 2xx status code -func (o *GetScaleSetOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this get scale set o k response has a 3xx status code -func (o *GetScaleSetOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this get scale set o k response has a 4xx status code -func (o *GetScaleSetOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this get scale set o k response has a 5xx status code -func (o *GetScaleSetOK) IsServerError() bool { - return false -} - -// IsCode returns true when this get scale set o k response a status code equal to that given -func (o *GetScaleSetOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the get scale set o k response -func (o *GetScaleSetOK) Code() int { - return 200 -} - -func (o *GetScaleSetOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] getScaleSetOK %s", 200, payload) -} - -func (o *GetScaleSetOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] getScaleSetOK %s", 200, payload) -} - -func (o *GetScaleSetOK) GetPayload() garm_params.ScaleSet { - return o.Payload -} - -func (o *GetScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewGetScaleSetDefault creates a GetScaleSetDefault with default headers values -func NewGetScaleSetDefault(code int) *GetScaleSetDefault { - return &GetScaleSetDefault{ - _statusCode: code, - } -} - -/* -GetScaleSetDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type GetScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this get scale set default response has a 2xx status code -func (o *GetScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this get scale set default response has a 3xx status code -func (o *GetScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this get scale set default response has a 4xx status code -func (o *GetScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this get scale set default response has a 5xx status code -func (o *GetScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this get scale set default response a status code equal to that given -func (o *GetScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the get scale set default response -func (o *GetScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *GetScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] GetScaleSet default %s", o._statusCode, payload) -} - -func (o *GetScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] GetScaleSet default %s", o._statusCode, payload) -} - -func (o *GetScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *GetScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/scalesets/list_scalesets_parameters.go b/client/scalesets/list_scalesets_parameters.go deleted file mode 100644 index b6fd1ccb..00000000 --- a/client/scalesets/list_scalesets_parameters.go +++ /dev/null @@ -1,128 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// NewListScalesetsParams creates a new ListScalesetsParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewListScalesetsParams() *ListScalesetsParams { - return &ListScalesetsParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewListScalesetsParamsWithTimeout creates a new ListScalesetsParams object -// with the ability to set a timeout on a request. -func NewListScalesetsParamsWithTimeout(timeout time.Duration) *ListScalesetsParams { - return &ListScalesetsParams{ - timeout: timeout, - } -} - -// NewListScalesetsParamsWithContext creates a new ListScalesetsParams object -// with the ability to set a context for a request. 
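Editor's note: every non-2xx status lands in the *Default envelope shown above, which doubles as the error value returned by the client methods, so a caller could recover the API payload with errors.As. A sketch, assuming an already-wired ClientService (the interface removed further down in this diff) and an auth writer, neither of which is shown here:

package main

import (
	"errors"
	"log"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/scalesets"
)

func showScaleSet(cli scalesets.ClientService, authToken runtime.ClientAuthInfoWriter) {
	resp, err := cli.GetScaleSet(scalesets.NewGetScaleSetParams().WithScalesetID("5"), authToken)
	if err != nil {
		// Non-2xx responses come back as *GetScaleSetDefault, which
		// carries the status code and the APIErrorResponse payload.
		var apiErr *scalesets.GetScaleSetDefault
		if errors.As(err, &apiErr) {
			log.Printf("API error %d: %+v", apiErr.Code(), apiErr.GetPayload())
			return
		}
		log.Printf("transport error: %v", err)
		return
	}
	log.Printf("scale set: %+v", resp.GetPayload())
}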
-func NewListScalesetsParamsWithContext(ctx context.Context) *ListScalesetsParams { - return &ListScalesetsParams{ - Context: ctx, - } -} - -// NewListScalesetsParamsWithHTTPClient creates a new ListScalesetsParams object -// with the ability to set a custom HTTPClient for a request. -func NewListScalesetsParamsWithHTTPClient(client *http.Client) *ListScalesetsParams { - return &ListScalesetsParams{ - HTTPClient: client, - } -} - -/* -ListScalesetsParams contains all the parameters to send to the API endpoint - - for the list scalesets operation. - - Typically these are written to a http.Request. -*/ -type ListScalesetsParams struct { - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the list scalesets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListScalesetsParams) WithDefaults() *ListScalesetsParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the list scalesets params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *ListScalesetsParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the list scalesets params -func (o *ListScalesetsParams) WithTimeout(timeout time.Duration) *ListScalesetsParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the list scalesets params -func (o *ListScalesetsParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the list scalesets params -func (o *ListScalesetsParams) WithContext(ctx context.Context) *ListScalesetsParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the list scalesets params -func (o *ListScalesetsParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the list scalesets params -func (o *ListScalesetsParams) WithHTTPClient(client *http.Client) *ListScalesetsParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the list scalesets params -func (o *ListScalesetsParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WriteToRequest writes these params to a swagger request -func (o *ListScalesetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/scalesets/list_scalesets_responses.go b/client/scalesets/list_scalesets_responses.go deleted file mode 100644 index 05064308..00000000 --- a/client/scalesets/list_scalesets_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// ListScalesetsReader is a Reader for the ListScalesets structure. -type ListScalesetsReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. 
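Editor's note: the *WithContext constructor above lets the caller bound a request with their own context rather than the client-level timeout. A sketch under the same assumptions as the previous note:

package main

import (
	"context"
	"log"
	"time"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/scalesets"
)

func listScaleSets(cli scalesets.ClientService, authToken runtime.ClientAuthInfoWriter) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// The HTTP request is canceled if ctx expires first.
	params := scalesets.NewListScalesetsParamsWithContext(ctx)
	resp, err := cli.ListScalesets(params, authToken)
	if err != nil {
		log.Printf("listing scale sets: %v", err)
		return
	}
	log.Printf("scale sets: %+v", resp.GetPayload())
}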
-func (o *ListScalesetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewListScalesetsOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewListScalesetsDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewListScalesetsOK creates a ListScalesetsOK with default headers values -func NewListScalesetsOK() *ListScalesetsOK { - return &ListScalesetsOK{} -} - -/* -ListScalesetsOK describes a response with status code 200, with default header values. - -ScaleSets -*/ -type ListScalesetsOK struct { - Payload garm_params.ScaleSets -} - -// IsSuccess returns true when this list scalesets o k response has a 2xx status code -func (o *ListScalesetsOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this list scalesets o k response has a 3xx status code -func (o *ListScalesetsOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this list scalesets o k response has a 4xx status code -func (o *ListScalesetsOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this list scalesets o k response has a 5xx status code -func (o *ListScalesetsOK) IsServerError() bool { - return false -} - -// IsCode returns true when this list scalesets o k response a status code equal to that given -func (o *ListScalesetsOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the list scalesets o k response -func (o *ListScalesetsOK) Code() int { - return 200 -} - -func (o *ListScalesetsOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets][%d] listScalesetsOK %s", 200, payload) -} - -func (o *ListScalesetsOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets][%d] listScalesetsOK %s", 200, payload) -} - -func (o *ListScalesetsOK) GetPayload() garm_params.ScaleSets { - return o.Payload -} - -func (o *ListScalesetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewListScalesetsDefault creates a ListScalesetsDefault with default headers values -func NewListScalesetsDefault(code int) *ListScalesetsDefault { - return &ListScalesetsDefault{ - _statusCode: code, - } -} - -/* -ListScalesetsDefault describes a response with status code -1, with default header values. 
- -APIErrorResponse -*/ -type ListScalesetsDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this list scalesets default response has a 2xx status code -func (o *ListScalesetsDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this list scalesets default response has a 3xx status code -func (o *ListScalesetsDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this list scalesets default response has a 4xx status code -func (o *ListScalesetsDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this list scalesets default response has a 5xx status code -func (o *ListScalesetsDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this list scalesets default response a status code equal to that given -func (o *ListScalesetsDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the list scalesets default response -func (o *ListScalesetsDefault) Code() int { - return o._statusCode -} - -func (o *ListScalesetsDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets][%d] ListScalesets default %s", o._statusCode, payload) -} - -func (o *ListScalesetsDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[GET /scalesets][%d] ListScalesets default %s", o._statusCode, payload) -} - -func (o *ListScalesetsDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *ListScalesetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/client/scalesets/scalesets_client.go b/client/scalesets/scalesets_client.go deleted file mode 100644 index 5375750d..00000000 --- a/client/scalesets/scalesets_client.go +++ /dev/null @@ -1,217 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "github.com/go-openapi/runtime" - httptransport "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" -) - -// New creates a new scalesets API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { - return &Client{transport: transport, formats: formats} -} - -// New creates a new scalesets API client with basic auth credentials. -// It takes the following parameters: -// - host: http host (github.com). -// - basePath: any base path for the API client ("/v1", "/v3"). -// - scheme: http scheme ("http", "https"). -// - user: user for basic authentication header. -// - password: password for basic authentication header. -func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { - transport := httptransport.New(host, basePath, []string{scheme}) - transport.DefaultAuthentication = httptransport.BasicAuth(user, password) - return &Client{transport: transport, formats: strfmt.Default} -} - -// New creates a new scalesets API client with a bearer token for authentication. -// It takes the following parameters: -// - host: http host (github.com). 
-// - basePath: any base path for the API client ("/v1", "/v3"). -// - scheme: http scheme ("http", "https"). -// - bearerToken: bearer token for Bearer authentication header. -func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { - transport := httptransport.New(host, basePath, []string{scheme}) - transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) - return &Client{transport: transport, formats: strfmt.Default} -} - -/* -Client for scalesets API -*/ -type Client struct { - transport runtime.ClientTransport - formats strfmt.Registry -} - -// ClientOption may be used to customize the behavior of Client methods. -type ClientOption func(*runtime.ClientOperation) - -// ClientService is the interface for Client methods -type ClientService interface { - DeleteScaleSet(params *DeleteScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error - - GetScaleSet(params *GetScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetScaleSetOK, error) - - ListScalesets(params *ListScalesetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScalesetsOK, error) - - UpdateScaleSet(params *UpdateScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateScaleSetOK, error) - - SetTransport(transport runtime.ClientTransport) -} - -/* -DeleteScaleSet deletes scale set by ID -*/ -func (a *Client) DeleteScaleSet(params *DeleteScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { - // TODO: Validate the params before sending - if params == nil { - params = NewDeleteScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "DeleteScaleSet", - Method: "DELETE", - PathPattern: "/scalesets/{scalesetID}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &DeleteScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - _, err := a.transport.Submit(op) - if err != nil { - return err - } - return nil -} - -/* -GetScaleSet gets scale set by ID -*/ -func (a *Client) GetScaleSet(params *GetScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetScaleSetOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewGetScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "GetScaleSet", - Method: "GET", - PathPattern: "/scalesets/{scalesetID}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &GetScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*GetScaleSetOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*GetScaleSetDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - -/* -ListScalesets lists all scalesets -*/ -func (a *Client) ListScalesets(params *ListScalesetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScalesetsOK, error) { - // TODO: 
Validate the params before sending - if params == nil { - params = NewListScalesetsParams() - } - op := &runtime.ClientOperation{ - ID: "ListScalesets", - Method: "GET", - PathPattern: "/scalesets", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &ListScalesetsReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*ListScalesetsOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*ListScalesetsDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - -/* -UpdateScaleSet updates scale set by ID -*/ -func (a *Client) UpdateScaleSet(params *UpdateScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateScaleSetOK, error) { - // TODO: Validate the params before sending - if params == nil { - params = NewUpdateScaleSetParams() - } - op := &runtime.ClientOperation{ - ID: "UpdateScaleSet", - Method: "PUT", - PathPattern: "/scalesets/{scalesetID}", - ProducesMediaTypes: []string{"application/json"}, - ConsumesMediaTypes: []string{"application/json"}, - Schemes: []string{"http"}, - Params: params, - Reader: &UpdateScaleSetReader{formats: a.formats}, - AuthInfo: authInfo, - Context: params.Context, - Client: params.HTTPClient, - } - for _, opt := range opts { - opt(op) - } - - result, err := a.transport.Submit(op) - if err != nil { - return nil, err - } - success, ok := result.(*UpdateScaleSetOK) - if ok { - return success, nil - } - // unexpected success response - unexpectedSuccess := result.(*UpdateScaleSetDefault) - return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) -} - -// SetTransport changes the transport on the client -func (a *Client) SetTransport(transport runtime.ClientTransport) { - a.transport = transport -} diff --git a/client/scalesets/update_scale_set_parameters.go b/client/scalesets/update_scale_set_parameters.go deleted file mode 100644 index 39668e9b..00000000 --- a/client/scalesets/update_scale_set_parameters.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - "net/http" - "time" - - "github.com/go-openapi/errors" - "github.com/go-openapi/runtime" - cr "github.com/go-openapi/runtime/client" - "github.com/go-openapi/strfmt" - - garm_params "github.com/cloudbase/garm/params" -) - -// NewUpdateScaleSetParams creates a new UpdateScaleSetParams object, -// with the default timeout for this client. -// -// Default values are not hydrated, since defaults are normally applied by the API server side. -// -// To enforce default values in parameter, use SetDefaults or WithDefaults. -func NewUpdateScaleSetParams() *UpdateScaleSetParams { - return &UpdateScaleSetParams{ - timeout: cr.DefaultTimeout, - } -} - -// NewUpdateScaleSetParamsWithTimeout creates a new UpdateScaleSetParams object -// with the ability to set a timeout on a request. 
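Editor's note: tying the deleted pieces together — constructor, ClientService, and per-operation params — an end-to-end call against this client would have looked roughly like the following. Host, base path, and token are placeholder values; passing a nil auth writer falls back to the DefaultAuthentication that the constructor installs on the transport:

package main

import (
	"log"

	"github.com/cloudbase/garm/client/scalesets"
)

func main() {
	// Placeholder connection details; NewClientWithBearerToken sets
	// Bearer auth as the transport's default authentication.
	cli := scalesets.NewClientWithBearerToken("garm.example.com", "/api/v1", "https", "example-jwt")

	resp, err := cli.ListScalesets(scalesets.NewListScalesetsParams(), nil)
	if err != nil {
		log.Fatalf("listing scale sets: %v", err)
	}
	log.Printf("scale sets: %+v", resp.GetPayload())

	// DeleteScaleSet returns only an error; there is no success payload.
	if err := cli.DeleteScaleSet(scalesets.NewDeleteScaleSetParams().WithScalesetID("5"), nil); err != nil {
		log.Fatalf("deleting scale set: %v", err)
	}
}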
-func NewUpdateScaleSetParamsWithTimeout(timeout time.Duration) *UpdateScaleSetParams { - return &UpdateScaleSetParams{ - timeout: timeout, - } -} - -// NewUpdateScaleSetParamsWithContext creates a new UpdateScaleSetParams object -// with the ability to set a context for a request. -func NewUpdateScaleSetParamsWithContext(ctx context.Context) *UpdateScaleSetParams { - return &UpdateScaleSetParams{ - Context: ctx, - } -} - -// NewUpdateScaleSetParamsWithHTTPClient creates a new UpdateScaleSetParams object -// with the ability to set a custom HTTPClient for a request. -func NewUpdateScaleSetParamsWithHTTPClient(client *http.Client) *UpdateScaleSetParams { - return &UpdateScaleSetParams{ - HTTPClient: client, - } -} - -/* -UpdateScaleSetParams contains all the parameters to send to the API endpoint - - for the update scale set operation. - - Typically these are written to a http.Request. -*/ -type UpdateScaleSetParams struct { - - /* Body. - - Parameters to update the scale set with. - */ - Body garm_params.UpdateScaleSetParams - - /* ScalesetID. - - ID of the scale set to update. - */ - ScalesetID string - - timeout time.Duration - Context context.Context - HTTPClient *http.Client -} - -// WithDefaults hydrates default values in the update scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *UpdateScaleSetParams) WithDefaults() *UpdateScaleSetParams { - o.SetDefaults() - return o -} - -// SetDefaults hydrates default values in the update scale set params (not the query body). -// -// All values with no default are reset to their zero value. -func (o *UpdateScaleSetParams) SetDefaults() { - // no default values defined for this parameter -} - -// WithTimeout adds the timeout to the update scale set params -func (o *UpdateScaleSetParams) WithTimeout(timeout time.Duration) *UpdateScaleSetParams { - o.SetTimeout(timeout) - return o -} - -// SetTimeout adds the timeout to the update scale set params -func (o *UpdateScaleSetParams) SetTimeout(timeout time.Duration) { - o.timeout = timeout -} - -// WithContext adds the context to the update scale set params -func (o *UpdateScaleSetParams) WithContext(ctx context.Context) *UpdateScaleSetParams { - o.SetContext(ctx) - return o -} - -// SetContext adds the context to the update scale set params -func (o *UpdateScaleSetParams) SetContext(ctx context.Context) { - o.Context = ctx -} - -// WithHTTPClient adds the HTTPClient to the update scale set params -func (o *UpdateScaleSetParams) WithHTTPClient(client *http.Client) *UpdateScaleSetParams { - o.SetHTTPClient(client) - return o -} - -// SetHTTPClient adds the HTTPClient to the update scale set params -func (o *UpdateScaleSetParams) SetHTTPClient(client *http.Client) { - o.HTTPClient = client -} - -// WithBody adds the body to the update scale set params -func (o *UpdateScaleSetParams) WithBody(body garm_params.UpdateScaleSetParams) *UpdateScaleSetParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the update scale set params -func (o *UpdateScaleSetParams) SetBody(body garm_params.UpdateScaleSetParams) { - o.Body = body -} - -// WithScalesetID adds the scalesetID to the update scale set params -func (o *UpdateScaleSetParams) WithScalesetID(scalesetID string) *UpdateScaleSetParams { - o.SetScalesetID(scalesetID) - return o -} - -// SetScalesetID adds the scalesetId to the update scale set params -func (o *UpdateScaleSetParams) SetScalesetID(scalesetID string) { - o.ScalesetID = scalesetID -} - -// WriteToRequest writes 
these params to a swagger request -func (o *UpdateScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { - - if err := r.SetTimeout(o.timeout); err != nil { - return err - } - var res []error - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - - // path param scalesetID - if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { - return err - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} diff --git a/client/scalesets/update_scale_set_responses.go b/client/scalesets/update_scale_set_responses.go deleted file mode 100644 index 666e8256..00000000 --- a/client/scalesets/update_scale_set_responses.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package scalesets - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "encoding/json" - "fmt" - "io" - - "github.com/go-openapi/runtime" - "github.com/go-openapi/strfmt" - - apiserver_params "github.com/cloudbase/garm/apiserver/params" - garm_params "github.com/cloudbase/garm/params" -) - -// UpdateScaleSetReader is a Reader for the UpdateScaleSet structure. -type UpdateScaleSetReader struct { - formats strfmt.Registry -} - -// ReadResponse reads a server response into the received o. -func (o *UpdateScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { - switch response.Code() { - case 200: - result := NewUpdateScaleSetOK() - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - return result, nil - default: - result := NewUpdateScaleSetDefault(response.Code()) - if err := result.readResponse(response, consumer, o.formats); err != nil { - return nil, err - } - if response.Code()/100 == 2 { - return result, nil - } - return nil, result - } -} - -// NewUpdateScaleSetOK creates a UpdateScaleSetOK with default headers values -func NewUpdateScaleSetOK() *UpdateScaleSetOK { - return &UpdateScaleSetOK{} -} - -/* -UpdateScaleSetOK describes a response with status code 200, with default header values. 
- -ScaleSet -*/ -type UpdateScaleSetOK struct { - Payload garm_params.ScaleSet -} - -// IsSuccess returns true when this update scale set o k response has a 2xx status code -func (o *UpdateScaleSetOK) IsSuccess() bool { - return true -} - -// IsRedirect returns true when this update scale set o k response has a 3xx status code -func (o *UpdateScaleSetOK) IsRedirect() bool { - return false -} - -// IsClientError returns true when this update scale set o k response has a 4xx status code -func (o *UpdateScaleSetOK) IsClientError() bool { - return false -} - -// IsServerError returns true when this update scale set o k response has a 5xx status code -func (o *UpdateScaleSetOK) IsServerError() bool { - return false -} - -// IsCode returns true when this update scale set o k response a status code equal to that given -func (o *UpdateScaleSetOK) IsCode(code int) bool { - return code == 200 -} - -// Code gets the status code for the update scale set o k response -func (o *UpdateScaleSetOK) Code() int { - return 200 -} - -func (o *UpdateScaleSetOK) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] updateScaleSetOK %s", 200, payload) -} - -func (o *UpdateScaleSetOK) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] updateScaleSetOK %s", 200, payload) -} - -func (o *UpdateScaleSetOK) GetPayload() garm_params.ScaleSet { - return o.Payload -} - -func (o *UpdateScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} - -// NewUpdateScaleSetDefault creates a UpdateScaleSetDefault with default headers values -func NewUpdateScaleSetDefault(code int) *UpdateScaleSetDefault { - return &UpdateScaleSetDefault{ - _statusCode: code, - } -} - -/* -UpdateScaleSetDefault describes a response with status code -1, with default header values. 
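Editor's note: UpdateScaleSet is the one operation in this package that carries a request body. The fields of garm_params.UpdateScaleSetParams live in the params package and are not part of this diff, so they are deliberately left empty below; the sketch only shows the shape of the call:

package main

import (
	"log"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/scalesets"
	garm_params "github.com/cloudbase/garm/params"
)

func updateScaleSet(cli scalesets.ClientService, authToken runtime.ClientAuthInfoWriter) {
	// Body fields elided: they are defined in the params package,
	// outside this diff.
	body := garm_params.UpdateScaleSetParams{}

	params := scalesets.NewUpdateScaleSetParams().
		WithScalesetID("5").
		WithBody(body)

	resp, err := cli.UpdateScaleSet(params, authToken)
	if err != nil {
		log.Printf("updating scale set: %v", err)
		return
	}
	log.Printf("updated scale set: %+v", resp.GetPayload())
}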
- -APIErrorResponse -*/ -type UpdateScaleSetDefault struct { - _statusCode int - - Payload apiserver_params.APIErrorResponse -} - -// IsSuccess returns true when this update scale set default response has a 2xx status code -func (o *UpdateScaleSetDefault) IsSuccess() bool { - return o._statusCode/100 == 2 -} - -// IsRedirect returns true when this update scale set default response has a 3xx status code -func (o *UpdateScaleSetDefault) IsRedirect() bool { - return o._statusCode/100 == 3 -} - -// IsClientError returns true when this update scale set default response has a 4xx status code -func (o *UpdateScaleSetDefault) IsClientError() bool { - return o._statusCode/100 == 4 -} - -// IsServerError returns true when this update scale set default response has a 5xx status code -func (o *UpdateScaleSetDefault) IsServerError() bool { - return o._statusCode/100 == 5 -} - -// IsCode returns true when this update scale set default response a status code equal to that given -func (o *UpdateScaleSetDefault) IsCode(code int) bool { - return o._statusCode == code -} - -// Code gets the status code for the update scale set default response -func (o *UpdateScaleSetDefault) Code() int { - return o._statusCode -} - -func (o *UpdateScaleSetDefault) Error() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] UpdateScaleSet default %s", o._statusCode, payload) -} - -func (o *UpdateScaleSetDefault) String() string { - payload, _ := json.Marshal(o.Payload) - return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] UpdateScaleSet default %s", o._statusCode, payload) -} - -func (o *UpdateScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { - return o.Payload -} - -func (o *UpdateScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - - // response payload - if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { - return err - } - - return nil -} diff --git a/cmd/garm-cli/cmd/enterprise.go b/cmd/garm-cli/cmd/enterprise.go index 5c937b81..eabfad26 100644 --- a/cmd/garm-cli/cmd/enterprise.go +++ b/cmd/garm-cli/cmd/enterprise.go @@ -16,7 +16,6 @@ package cmd import ( "fmt" - "strings" "github.com/jedib0t/go-pretty/v6/table" "github.com/spf13/cobra" @@ -28,7 +27,6 @@ import ( var ( enterpriseName string - enterpriseEndpoint string enterpriseWebhookSecret string enterpriseCreds string ) @@ -86,8 +84,6 @@ var enterpriseListCmd = &cobra.Command{ } listEnterprisesReq := apiClientEnterprises.NewListEnterprisesParams() - listEnterprisesReq.Name = &enterpriseName - listEnterprisesReq.Endpoint = &enterpriseEndpoint response, err := apiCli.Enterprises.ListEnterprises(listEnterprisesReq, authToken) if err != nil { return err @@ -112,14 +108,8 @@ var enterpriseShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - - enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) - if err != nil { - return err - } - showEnterpriseReq := apiClientEnterprises.NewGetEnterpriseParams() - showEnterpriseReq.EnterpriseID = enterpriseID + showEnterpriseReq.EnterpriseID = args[0] response, err := apiCli.Enterprises.GetEnterprise(showEnterpriseReq, authToken) if err != nil { return err @@ -145,14 +135,8 @@ var enterpriseDeleteCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - - enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) - if err != nil { - return err - } - deleteEnterpriseReq := 
apiClientEnterprises.NewDeleteEnterpriseParams() - deleteEnterpriseReq.EnterpriseID = enterpriseID + deleteEnterpriseReq.EnterpriseID = args[0] if err := apiCli.Enterprises.DeleteEnterprise(deleteEnterpriseReq, authToken); err != nil { return err } @@ -177,18 +161,13 @@ var enterpriseUpdateCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) - if err != nil { - return err - } - updateEnterpriseReq := apiClientEnterprises.NewUpdateEnterpriseParams() updateEnterpriseReq.Body = params.UpdateEntityParams{ WebhookSecret: repoWebhookSecret, CredentialsName: repoCreds, PoolBalancerType: params.PoolBalancerType(poolBalancerType), } - updateEnterpriseReq.EnterpriseID = enterpriseID + updateEnterpriseReq.EnterpriseID = args[0] response, err := apiCli.Enterprises.UpdateEnterprise(updateEnterpriseReq, authToken) if err != nil { return err @@ -205,18 +184,12 @@ func init() { enterpriseAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") enterpriseListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") - enterpriseListCmd.Flags().StringVarP(&enterpriseName, "name", "n", "", "Exact enterprise name to filter by.") - enterpriseListCmd.Flags().StringVarP(&enterpriseEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.") enterpriseAddCmd.MarkFlagRequired("credentials") //nolint enterpriseAddCmd.MarkFlagRequired("name") //nolint enterpriseUpdateCmd.Flags().StringVar(&enterpriseWebhookSecret, "webhook-secret", "", "The webhook secret for this enterprise") enterpriseUpdateCmd.Flags().StringVar(&enterpriseCreds, "credentials", "", "Credentials name. 
See credentials list.") enterpriseUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.") - enterpriseUpdateCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") - - enterpriseDeleteCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") - enterpriseShowCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") enterpriseCmd.AddCommand( enterpriseListCmd, @@ -277,15 +250,9 @@ func formatOneEnterprise(enterprise params.Enterprise) { t.AppendRow(table.Row{"Pools", pool.ID}, rowConfigAutoMerge) } } - - if len(enterprise.Events) > 0 { - for _, event := range enterprise.Events { - t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), event.Message)}, rowConfigAutoMerge) - } - } t.SetColumnConfigs([]table.ColumnConfig{ {Number: 1, AutoMerge: true}, - {Number: 2, AutoMerge: false, WidthMax: 100}, + {Number: 2, AutoMerge: false}, }) fmt.Println(t.Render()) diff --git a/cmd/garm-cli/cmd/events.go b/cmd/garm-cli/cmd/events.go index da44732a..f38e9ea6 100644 --- a/cmd/garm-cli/cmd/events.go +++ b/cmd/garm-cli/cmd/events.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package cmd import ( diff --git a/cmd/garm-cli/cmd/gitea.go b/cmd/garm-cli/cmd/gitea.go deleted file mode 100644 index 6627fd6f..00000000 --- a/cmd/garm-cli/cmd/gitea.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. -package cmd - -import "github.com/spf13/cobra" - -// giteaCmd represents the the gitea command. This command has a set -// of subcommands that allow configuring and managing Gitea endpoints -// and credentials. -var giteaCmd = &cobra.Command{ - Use: "gitea", - Aliases: []string{"gt"}, - SilenceUsage: true, - Short: "Manage Gitea resources", - Long: `Manage Gitea related resources. 
- -This command allows you to configure and manage Gitea endpoints and credentials`, - Run: nil, -} - -func init() { - rootCmd.AddCommand(giteaCmd) -} diff --git a/cmd/garm-cli/cmd/gitea_credentials.go b/cmd/garm-cli/cmd/gitea_credentials.go deleted file mode 100644 index d26f95ed..00000000 --- a/cmd/garm-cli/cmd/gitea_credentials.go +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package cmd - -import ( - "fmt" - "strconv" - - "github.com/jedib0t/go-pretty/v6/table" - "github.com/spf13/cobra" - - apiClientCreds "github.com/cloudbase/garm/client/credentials" - "github.com/cloudbase/garm/cmd/garm-cli/common" - "github.com/cloudbase/garm/params" -) - -// giteaCredentialsCmd represents the gitea credentials command -var giteaCredentialsCmd = &cobra.Command{ - Use: "credentials", - Aliases: []string{"creds"}, - Short: "Manage gitea credentials", - Long: `Manage Gitea credentials stored in GARM. - -This command allows you to add, update, list and delete Gitea credentials.`, - Run: nil, -} - -var giteaCredentialsListCmd = &cobra.Command{ - Use: "list", - Aliases: []string{"ls"}, - Short: "List configured gitea credentials", - Long: `List the names of the gitea personal access tokens available to the garm.`, - SilenceUsage: true, - RunE: func(_ *cobra.Command, _ []string) error { - if needsInit { - return errNeedsInitError - } - - listCredsReq := apiClientCreds.NewListGiteaCredentialsParams() - response, err := apiCli.Credentials.ListGiteaCredentials(listCredsReq, authToken) - if err != nil { - return err - } - formatGiteaCredentials(response.Payload) - return nil - }, -} - -var giteaCredentialsShowCmd = &cobra.Command{ - Use: "show", - Aliases: []string{"get"}, - Short: "Show details of a configured gitea credential", - Long: `Show the details of a configured gitea credential.`, - SilenceUsage: true, - RunE: func(_ *cobra.Command, args []string) error { - if needsInit { - return errNeedsInitError - } - - if len(args) < 1 { - return fmt.Errorf("missing required argument: credential ID") - } - - credID, err := strconv.ParseInt(args[0], 10, 64) - if err != nil { - return fmt.Errorf("invalid credential ID: %s", args[0]) - } - showCredsReq := apiClientCreds.NewGetGiteaCredentialsParams().WithID(credID) - response, err := apiCli.Credentials.GetGiteaCredentials(showCredsReq, authToken) - if err != nil { - return err - } - formatOneGiteaCredential(response.Payload) - return nil - }, -} - -var giteaCredentialsUpdateCmd = &cobra.Command{ - Use: "update", - Short: "Update a gitea credential", - Long: "Update a gitea credential", - SilenceUsage: true, - RunE: func(_ *cobra.Command, args []string) error { - if needsInit { - return errNeedsInitError - } - - if len(args) < 1 { - return fmt.Errorf("missing required argument: credential ID") - } - - if len(args) > 1 { - return fmt.Errorf("too many arguments") - } - - credID, err := strconv.ParseInt(args[0], 10, 64) - if err != nil { - return fmt.Errorf("invalid credential ID: 
%s", args[0]) - } - - updateParams, err := parseGiteaCredentialsUpdateParams() - if err != nil { - return err - } - - updateCredsReq := apiClientCreds.NewUpdateGiteaCredentialsParams().WithID(credID) - updateCredsReq.Body = updateParams - - response, err := apiCli.Credentials.UpdateGiteaCredentials(updateCredsReq, authToken) - if err != nil { - return err - } - formatOneGiteaCredential(response.Payload) - return nil - }, -} - -var giteaCredentialsDeleteCmd = &cobra.Command{ - Use: "delete", - Aliases: []string{"remove", "rm"}, - Short: "Delete a gitea credential", - Long: "Delete a gitea credential", - SilenceUsage: true, - RunE: func(_ *cobra.Command, args []string) error { - if needsInit { - return errNeedsInitError - } - - if len(args) < 1 { - return fmt.Errorf("missing required argument: credential ID") - } - - if len(args) > 1 { - return fmt.Errorf("too many arguments") - } - - credID, err := strconv.ParseInt(args[0], 10, 64) - if err != nil { - return fmt.Errorf("invalid credential ID: %s", args[0]) - } - - deleteCredsReq := apiClientCreds.NewDeleteGiteaCredentialsParams().WithID(credID) - if err := apiCli.Credentials.DeleteGiteaCredentials(deleteCredsReq, authToken); err != nil { - return err - } - return nil - }, -} - -var giteaCredentialsAddCmd = &cobra.Command{ - Use: "add", - Short: "Add a gitea credential", - Long: "Add a gitea credential", - SilenceUsage: true, - RunE: func(_ *cobra.Command, args []string) error { - if needsInit { - return errNeedsInitError - } - - if len(args) > 0 { - return fmt.Errorf("too many arguments") - } - - addParams, err := parseGiteaCredentialsAddParams() - if err != nil { - return err - } - - addCredsReq := apiClientCreds.NewCreateGiteaCredentialsParams() - addCredsReq.Body = addParams - - response, err := apiCli.Credentials.CreateGiteaCredentials(addCredsReq, authToken) - if err != nil { - return err - } - formatOneGiteaCredential(response.Payload) - return nil - }, -} - -func init() { - giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsName, "name", "", "Name of the credential") - giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsDescription, "description", "", "Description of the credential") - giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsOAuthToken, "pat-oauth-token", "", "If the credential is a personal access token, the OAuth token") - - giteaCredentialsListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") - - giteaCredentialsAddCmd.Flags().StringVar(&credentialsName, "name", "", "Name of the credential") - giteaCredentialsAddCmd.Flags().StringVar(&credentialsDescription, "description", "", "Description of the credential") - giteaCredentialsAddCmd.Flags().StringVar(&credentialsOAuthToken, "pat-oauth-token", "", "If the credential is a personal access token, the OAuth token") - giteaCredentialsAddCmd.Flags().StringVar(&credentialsType, "auth-type", "", "The type of the credential") - giteaCredentialsAddCmd.Flags().StringVar(&credentialsEndpoint, "endpoint", "", "The endpoint to associate the credential with") - - giteaCredentialsAddCmd.MarkFlagRequired("name") - giteaCredentialsAddCmd.MarkFlagRequired("auth-type") - giteaCredentialsAddCmd.MarkFlagRequired("description") - giteaCredentialsAddCmd.MarkFlagRequired("endpoint") - - giteaCredentialsCmd.AddCommand( - giteaCredentialsListCmd, - giteaCredentialsShowCmd, - giteaCredentialsUpdateCmd, - giteaCredentialsDeleteCmd, - giteaCredentialsAddCmd, - ) - giteaCmd.AddCommand(giteaCredentialsCmd) -} - -func parseGiteaCredentialsAddParams() (ret 
diff --git a/cmd/garm-cli/cmd/gitea_endpoints.go b/cmd/garm-cli/cmd/gitea_endpoints.go
deleted file mode 100644
index 55fa09c9..00000000
--- a/cmd/garm-cli/cmd/gitea_endpoints.go
+++ /dev/null
@@ -1,231 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package cmd
-
-import (
-	"fmt"
-
-	"github.com/spf13/cobra"
-
-	apiClientEndpoints "github.com/cloudbase/garm/client/endpoints"
-	"github.com/cloudbase/garm/params"
-)
-
-var giteaEndpointCmd = &cobra.Command{
-	Use:          "endpoint",
-	SilenceUsage: true,
-	Short:        "Manage Gitea endpoints",
-	Long: `Manage Gitea endpoints.
-
-This command allows you to configure and manage Gitea endpoints`,
-	Run: nil,
-}
-
-var giteaEndpointListCmd = &cobra.Command{
-	Use:          "list",
-	Aliases:      []string{"ls"},
-	SilenceUsage: true,
-	Short:        "List Gitea endpoints",
-	Long:         `List all configured Gitea endpoints.`,
-	RunE: func(_ *cobra.Command, _ []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		newListReq := apiClientEndpoints.NewListGiteaEndpointsParams()
-		response, err := apiCli.Endpoints.ListGiteaEndpoints(newListReq, authToken)
-		if err != nil {
-			return err
-		}
-		formatEndpoints(response.Payload)
-		return nil
-	},
-}
-
-var giteaEndpointShowCmd = &cobra.Command{
-	Use:          "show",
-	Aliases:      []string{"get"},
-	SilenceUsage: true,
-	Short:        "Show Gitea endpoint",
-	Long:         `Show details of a Gitea endpoint.`,
-	RunE: func(_ *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-		if len(args) == 0 {
-			return fmt.Errorf("requires an endpoint name")
-		}
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		newShowReq := apiClientEndpoints.NewGetGiteaEndpointParams()
-		newShowReq.Name = args[0]
-		response, err := apiCli.Endpoints.GetGiteaEndpoint(newShowReq, authToken)
-		if err != nil {
-			return err
-		}
-		formatOneEndpoint(response.Payload)
-		return nil
-	},
-}
-
-var giteaEndpointCreateCmd = &cobra.Command{
-	Use:          "create",
-	SilenceUsage: true,
-	Short:        "Create Gitea endpoint",
-	Long:         `Create a new Gitea endpoint.`,
-	RunE: func(_ *cobra.Command, _ []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		createParams, err := parseGiteaCreateParams()
-		if err != nil {
-			return err
-		}
-
-		newCreateReq := apiClientEndpoints.NewCreateGiteaEndpointParams()
-		newCreateReq.Body = createParams
-
-		response, err := apiCli.Endpoints.CreateGiteaEndpoint(newCreateReq, authToken)
-		if err != nil {
-			return err
-		}
-		formatOneEndpoint(response.Payload)
-		return nil
-	},
-}
-
-var giteaEndpointDeleteCmd = &cobra.Command{
-	Use:          "delete",
-	Aliases:      []string{"remove", "rm"},
-	SilenceUsage: true,
-	Short:        "Delete Gitea endpoint",
-	Long:         "Delete a Gitea endpoint",
-	RunE: func(_ *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-		if len(args) == 0 {
-			return fmt.Errorf("requires an endpoint name")
-		}
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		newDeleteReq := apiClientEndpoints.NewDeleteGiteaEndpointParams()
-		newDeleteReq.Name = args[0]
-		if err := apiCli.Endpoints.DeleteGiteaEndpoint(newDeleteReq, authToken); err != nil {
-			return err
-		}
-		return nil
-	},
-}
-
-var giteaEndpointUpdateCmd = &cobra.Command{
-	Use:          "update",
-	Short:        "Update Gitea endpoint",
-	Long:         "Update a Gitea endpoint",
-	SilenceUsage: true,
-	RunE: func(cmd *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-		if len(args) == 0 {
-			return fmt.Errorf("requires an endpoint name")
-		}
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		updateParams := params.UpdateGiteaEndpointParams{}
-
-		if cmd.Flags().Changed("ca-cert-path") {
-			cert, err := parseAndReadCABundle()
-			if err != nil {
-				return err
-			}
-			updateParams.CACertBundle = cert
-		}
-
-		if cmd.Flags().Changed("description") {
-			updateParams.Description = &endpointDescription
-		}
-
-		if cmd.Flags().Changed("base-url") {
-			updateParams.BaseURL = &endpointBaseURL
-		}
-
-		if cmd.Flags().Changed("api-base-url") {
-			updateParams.APIBaseURL = &endpointAPIBaseURL
-		}
-
-		newEndpointUpdateReq := apiClientEndpoints.NewUpdateGiteaEndpointParams()
-		newEndpointUpdateReq.Name = args[0]
-		newEndpointUpdateReq.Body = updateParams
-
-		response, err := apiCli.Endpoints.UpdateGiteaEndpoint(newEndpointUpdateReq, authToken)
-		if err != nil {
-			return err
-		}
-		formatOneEndpoint(response.Payload)
-		return nil
-	},
-}
-
-func init() {
-	giteaEndpointCreateCmd.Flags().StringVar(&endpointName, "name", "", "Name of the Gitea endpoint")
-	giteaEndpointCreateCmd.Flags().StringVar(&endpointDescription, "description", "", "Description for the github endpoint")
-	giteaEndpointCreateCmd.Flags().StringVar(&endpointBaseURL, "base-url", "", "Base URL of the Gitea endpoint")
-	giteaEndpointCreateCmd.Flags().StringVar(&endpointAPIBaseURL, "api-base-url", "", "API Base URL of the Gitea endpoint")
-	giteaEndpointCreateCmd.Flags().StringVar(&endpointCACertPath, "ca-cert-path", "", "CA Cert Path of the Gitea endpoint")
-
-	giteaEndpointListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.")
-
-	giteaEndpointCreateCmd.MarkFlagRequired("name")
-	giteaEndpointCreateCmd.MarkFlagRequired("base-url")
-	giteaEndpointCreateCmd.MarkFlagRequired("api-base-url")
-
-	giteaEndpointUpdateCmd.Flags().StringVar(&endpointDescription, "description", "", "Description for the gitea endpoint")
-	giteaEndpointUpdateCmd.Flags().StringVar(&endpointBaseURL, "base-url", "", "Base URL of the Gitea endpoint")
-	giteaEndpointUpdateCmd.Flags().StringVar(&endpointAPIBaseURL, "api-base-url", "", "API Base URL of the Gitea endpoint")
-	giteaEndpointUpdateCmd.Flags().StringVar(&endpointCACertPath, "ca-cert-path", "", "CA Cert Path of the Gitea endpoint")
-
-	giteaEndpointCmd.AddCommand(
-		giteaEndpointListCmd,
-		giteaEndpointShowCmd,
-		giteaEndpointCreateCmd,
-		giteaEndpointDeleteCmd,
-		giteaEndpointUpdateCmd,
-	)
-
-	giteaCmd.AddCommand(giteaEndpointCmd)
-}
-
-func parseGiteaCreateParams() (params.CreateGiteaEndpointParams, error) {
-	certBundleBytes, err := parseAndReadCABundle()
-	if err != nil {
-		return params.CreateGiteaEndpointParams{}, err
-	}
-
-	ret := params.CreateGiteaEndpointParams{
-		Name:         endpointName,
-		BaseURL:      endpointBaseURL,
-		APIBaseURL:   endpointAPIBaseURL,
-		Description:  endpointDescription,
-		CACertBundle: certBundleBytes,
-	}
-	return ret, nil
-}
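Both the Gitea and GitHub endpoint commands route --ca-cert-path through a shared helper that reads the CA bundle; its full body is not visible in this patch. A sketch of what such a helper plausibly does — this is an assumption for illustration, not GARM's exact implementation:

package main

import (
	"encoding/pem"
	"fmt"
	"os"
)

// readCABundle is a hypothetical stand-in for the CA bundle helper:
// an empty path means "no bundle", otherwise the file must contain at
// least one decodable PEM block.
func readCABundle(path string) ([]byte, error) {
	if path == "" {
		return nil, nil
	}
	contents, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("reading CA bundle: %w", err)
	}
	if block, _ := pem.Decode(contents); block == nil {
		return nil, fmt.Errorf("%s does not contain valid PEM data", path)
	}
	return contents, nil
}

func main() {
	// No path set; the helper returns nil, nil and the caller sends
	// no CA bundle at all.
	bundle, err := readCABundle("")
	fmt.Println(bundle, err)
}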
diff --git a/cmd/garm-cli/cmd/github.go b/cmd/garm-cli/cmd/github.go
index 71342026..8b79a381 100644
--- a/cmd/garm-cli/cmd/github.go
+++ b/cmd/garm-cli/cmd/github.go
@@ -1,16 +1,3 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
 package cmd
 
 import "github.com/spf13/cobra"
diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go
index 6f9b6409..c4faec1a 100644
--- a/cmd/garm-cli/cmd/github_credentials.go
+++ b/cmd/garm-cli/cmd/github_credentials.go
@@ -283,12 +283,12 @@ func parsePrivateKeyFromPath(path string) ([]byte, error) {
 func parseCredentialsAddParams() (ret params.CreateGithubCredentialsParams, err error) {
 	ret.Name = credentialsName
 	ret.Description = credentialsDescription
-	ret.AuthType = params.ForgeAuthType(credentialsType)
+	ret.AuthType = params.GithubAuthType(credentialsType)
 	ret.Endpoint = credentialsEndpoint
 	switch ret.AuthType {
-	case params.ForgeAuthTypePAT:
+	case params.GithubAuthTypePAT:
 		ret.PAT.OAuth2Token = credentialsOAuthToken
-	case params.ForgeAuthTypeApp:
+	case params.GithubAuthTypeApp:
 		ret.App.InstallationID = credentialsAppInstallationID
 		ret.App.AppID = credentialsAppID
 		keyContents, err := parsePrivateKeyFromPath(credentialsPrivateKeyPath)
@@ -344,7 +344,7 @@ func parseCredentialsUpdateParams() (params.UpdateGithubCredentialsParams, error
 	return updateParams, nil
 }
 
-func formatGithubCredentials(creds []params.ForgeCredentials) {
+func formatGithubCredentials(creds []params.GithubCredentials) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(creds)
 		return
@@ -366,7 +366,7 @@ func formatGithubCredentials(creds []params.ForgeCredentials) {
 	fmt.Println(t.Render())
 }
 
-func formatOneGithubCredential(cred params.ForgeCredentials) {
+func formatOneGithubCredential(cred params.GithubCredentials) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(cred)
 		return
@@ -375,11 +375,6 @@ func formatOneGithubCredential(cred params.ForgeCredentials) {
 	header := table.Row{"Field", "Value"}
 	t.AppendHeader(header)
 
-	var resetMinutes float64
-	if cred.RateLimit != nil {
-		resetMinutes = cred.RateLimit.ResetIn().Minutes()
-	}
-
 	t.AppendRow(table.Row{"ID", cred.ID})
 	t.AppendRow(table.Row{"Created At", cred.CreatedAt})
 	t.AppendRow(table.Row{"Updated At", cred.UpdatedAt})
@@ -390,11 +385,6 @@ func formatOneGithubCredential(cred params.ForgeCredentials) {
 	t.AppendRow(table.Row{"Upload URL", cred.UploadBaseURL})
 	t.AppendRow(table.Row{"Type", cred.AuthType})
 	t.AppendRow(table.Row{"Endpoint", cred.Endpoint.Name})
-	if resetMinutes > 0 {
-		t.AppendRow(table.Row{"", ""})
-		t.AppendRow(table.Row{"Remaining API requests", cred.RateLimit.Remaining})
-		t.AppendRow(table.Row{"Rate limit reset", fmt.Sprintf("%d minutes", int64(resetMinutes))})
-	}
 
 	if len(cred.Repositories) > 0 {
 		t.AppendRow(table.Row{"", ""})
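parseCredentialsAddParams above switches on a typed auth-type enum derived from a plain string flag and rejects unknown values up front. The shape of that conversion, isolated with simplified stand-in types (AuthType, Credentials and fromFlags are illustrative, not GARM's):

package main

import "fmt"

// AuthType stands in for the GithubAuthType values switched on above;
// the real types live in garm's params package.
type AuthType string

const (
	AuthTypePAT AuthType = "pat"
	AuthTypeApp AuthType = "app"
)

type Credentials struct {
	AuthType AuthType
	Token    string // used for PAT
	AppID    int64  // used for App
}

// fromFlags casts the raw flag value to the typed enum, then fills in
// only the fields relevant to that auth type.
func fromFlags(authType, token string, appID int64) (Credentials, error) {
	creds := Credentials{AuthType: AuthType(authType)}
	switch creds.AuthType {
	case AuthTypePAT:
		creds.Token = token
	case AuthTypeApp:
		creds.AppID = appID
	default:
		return Credentials{}, fmt.Errorf("invalid auth type: %s (supported: pat, app)", authType)
	}
	return creds, nil
}

func main() {
	creds, err := fromFlags("pat", "ghp_example", 0)
	fmt.Println(creds, err)
}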
diff --git a/cmd/garm-cli/cmd/github_endpoints.go b/cmd/garm-cli/cmd/github_endpoints.go
index 61f46810..2be14f52 100644
--- a/cmd/garm-cli/cmd/github_endpoints.go
+++ b/cmd/garm-cli/cmd/github_endpoints.go
@@ -1,16 +1,3 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
 package cmd
 
 import (
@@ -158,7 +145,7 @@ var githubEndpointUpdateCmd = &cobra.Command{
 		updateParams := params.UpdateGithubEndpointParams{}
 
 		if cmd.Flags().Changed("ca-cert-path") {
-			cert, err := parseAndReadCABundle()
+			cert, err := parseReadAndParsCABundle()
 			if err != nil {
 				return err
 			}
@@ -226,7 +213,7 @@ func init() {
 	githubCmd.AddCommand(githubEndpointCmd)
 }
 
-func parseAndReadCABundle() ([]byte, error) {
+func parseReadAndParsCABundle() ([]byte, error) {
 	if endpointCACertPath == "" {
 		return nil, nil
 	}
@@ -249,7 +236,7 @@ func parseAndReadCABundle() ([]byte, error) {
 }
 
 func parseCreateParams() (params.CreateGithubEndpointParams, error) {
-	certBundleBytes, err := parseAndReadCABundle()
+	certBundleBytes, err := parseReadAndParsCABundle()
 	if err != nil {
 		return params.CreateGithubEndpointParams{}, err
 	}
@@ -265,7 +252,7 @@ func parseCreateParams() (params.CreateGithubEndpointParams, error) {
 	return ret, nil
 }
 
-func formatEndpoints(endpoints params.ForgeEndpoints) {
+func formatEndpoints(endpoints params.GithubEndpoints) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(endpoints)
 		return
@@ -287,7 +274,7 @@ func formatEndpoints(endpoints params.ForgeEndpoints) {
 	fmt.Println(t.Render())
 }
 
-func formatOneEndpoint(endpoint params.ForgeEndpoint) {
+func formatOneEndpoint(endpoint params.GithubEndpoint) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(endpoint)
 		return
@@ -300,9 +287,7 @@ func formatOneEndpoint(endpoint params.ForgeEndpoint) {
 	t.AppendRow([]interface{}{"Created At", endpoint.CreatedAt})
 	t.AppendRow([]interface{}{"Updated At", endpoint.UpdatedAt})
 	t.AppendRow([]interface{}{"Base URL", endpoint.BaseURL})
-	if endpoint.UploadBaseURL != "" {
-		t.AppendRow([]interface{}{"Upload URL", endpoint.UploadBaseURL})
-	}
+	t.AppendRow([]interface{}{"Upload URL", endpoint.UploadBaseURL})
 	t.AppendRow([]interface{}{"API Base URL", endpoint.APIBaseURL})
 	if len(endpoint.CACertBundle) > 0 {
 		t.AppendRow([]interface{}{"CA Cert Bundle", string(endpoint.CACertBundle)})
diff --git a/cmd/garm-cli/cmd/init.go b/cmd/garm-cli/cmd/init.go
index c544699e..6c6a6072 100644
--- a/cmd/garm-cli/cmd/init.go
+++ b/cmd/garm-cli/cmd/init.go
@@ -21,6 +21,7 @@ import (
 
 	openapiRuntimeClient "github.com/go-openapi/runtime/client"
 	"github.com/jedib0t/go-pretty/v6/table"
+	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 
 	apiClientController "github.com/cloudbase/garm/client/controller"
@@ -79,7 +80,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas
 
 	response, err := apiCli.FirstRun.FirstRun(newUserReq, authToken)
 	if err != nil {
-		return fmt.Errorf("error initializing manager: %w", err)
+		return errors.Wrap(err, "initializing manager")
 	}
 
 	newLoginParamsReq := apiClientLogin.NewLoginParams()
@@ -90,7 +91,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas
 
 	token, err := apiCli.Login.Login(newLoginParamsReq, authToken)
 	if err != nil {
-		return fmt.Errorf("error authenticating: %w", err)
+		return errors.Wrap(err, "authenticating")
 	}
 
 	cfg.Managers = append(cfg.Managers, config.Manager{
@@ -103,7 +104,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas
 	cfg.ActiveManager = loginProfileName
 
 	if err := cfg.SaveConfig(); err != nil {
-		return fmt.Errorf("error saving config: %w", err)
+		return errors.Wrap(err, "saving config")
 	}
 
 	updateUrlsReq := apiClientController.NewUpdateControllerParams()
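The init.go hunks (and the pool.go hunks further down) swap fmt.Errorf wrapping with %w for github.com/pkg/errors.Wrap. Both keep the cause inspectable via errors.Is; pkg/errors additionally records a stack trace at the wrap site. A minimal comparison:

package main

import (
	"errors"
	"fmt"
	"os"

	pkgerrors "github.com/pkg/errors"
)

func main() {
	_, err := os.Open("/nonexistent")

	// Standard library wrapping, as removed by this patch.
	wrapped1 := fmt.Errorf("error initializing manager: %w", err)

	// pkg/errors wrapping, as restored by this patch.
	wrapped2 := pkgerrors.Wrap(err, "initializing manager")

	// Both unwrap to the original cause.
	fmt.Println(errors.Is(wrapped1, os.ErrNotExist)) // true
	fmt.Println(errors.Is(wrapped2, os.ErrNotExist)) // true
}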
diff --git a/cmd/garm-cli/cmd/log.go b/cmd/garm-cli/cmd/log.go
index a7d2dfba..901e8e0f 100644
--- a/cmd/garm-cli/cmd/log.go
+++ b/cmd/garm-cli/cmd/log.go
@@ -1,22 +1,8 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
 package cmd
 
 import (
 	"context"
 	"os/signal"
-	"strings"
 
 	"github.com/spf13/cobra"
 
@@ -24,12 +10,7 @@ import (
 	"github.com/cloudbase/garm/cmd/garm-cli/common"
 )
 
-var (
-	eventsFilters string
-	logLevel      string
-	filters       []string
-	enableColor   bool
-)
+var eventsFilters string
 
 var logCmd = &cobra.Command{
 	Use:     "debug-log",
@@ -40,19 +21,7 @@ var logCmd = &cobra.Command{
 		ctx, stop := signal.NotifyContext(context.Background(), signals...)
 		defer stop()
 
-		// Parse filters into map
-		attributeFilters := make(map[string]string)
-		for _, filter := range filters {
-			parts := strings.SplitN(filter, "=", 2)
-			if len(parts) == 2 {
-				attributeFilters[parts[0]] = parts[1]
-			}
-		}
-
-		// Create log formatter with filters
-		logFormatter := common.NewLogFormatter(logLevel, attributeFilters, enableColor)
-
-		reader, err := garmWs.NewReader(ctx, mgr.BaseURL, "/api/v1/ws/logs", mgr.Token, logFormatter.FormatWebsocketMessage)
+		reader, err := garmWs.NewReader(ctx, mgr.BaseURL, "/api/v1/ws/logs", mgr.Token, common.PrintWebsocketMessage)
 		if err != nil {
 			return err
 		}
@@ -67,9 +36,5 @@ var logCmd = &cobra.Command{
 }
 
 func init() {
-	logCmd.Flags().StringVar(&logLevel, "log-level", "", "Minimum log level to display (DEBUG, INFO, WARN, ERROR)")
-	logCmd.Flags().StringArrayVar(&filters, "filter", []string{}, "Filter logs by attribute (format: key=value) or message content (msg=text). You can specify this option multiple times. The filter will return true for any of the attributes you set.")
-	logCmd.Flags().BoolVar(&enableColor, "enable-color", true, "Enable color logging (auto-detects terminal support)")
-
 	rootCmd.AddCommand(logCmd)
}
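The log filtering removed above splits each repeated --filter value on the first '=' and collects the pairs into a map. That parsing, lifted into a self-contained form:

package main

import (
	"fmt"
	"strings"
)

// parseFilters reproduces the attribute-filter parsing removed from
// log.go: each "key=value" entry becomes a map entry; entries without
// an '=' are silently skipped, as in the original.
func parseFilters(filters []string) map[string]string {
	attributeFilters := make(map[string]string)
	for _, filter := range filters {
		parts := strings.SplitN(filter, "=", 2)
		if len(parts) == 2 {
			attributeFilters[parts[0]] = parts[1]
		}
	}
	return attributeFilters
}

func main() {
	fmt.Println(parseFilters([]string{"pool_id=abc", "msg=started", "bogus"}))
	// Output: map[msg:started pool_id:abc]
}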
diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go
index b16812fa..c7be1f19 100644
--- a/cmd/garm-cli/cmd/organization.go
+++ b/cmd/garm-cli/cmd/organization.go
@@ -16,7 +16,6 @@ package cmd
 
 import (
 	"fmt"
-	"strings"
 
 	"github.com/jedib0t/go-pretty/v6/table"
 	"github.com/spf13/cobra"
@@ -29,7 +28,6 @@ import (
 
 var (
 	orgName                string
-	orgEndpoint            string
 	orgWebhookSecret       string
 	orgCreds               string
 	orgRandomWebhookSecret bool
@@ -76,13 +74,8 @@ var orgWebhookInstallCmd = &cobra.Command{
 			return fmt.Errorf("too many arguments")
 		}
 
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
-
 		installWebhookReq := apiClientOrgs.NewInstallOrgWebhookParams()
-		installWebhookReq.OrgID = orgID
+		installWebhookReq.OrgID = args[0]
 		installWebhookReq.Body.InsecureSSL = insecureOrgWebhook
 		installWebhookReq.Body.WebhookEndpointType = params.WebhookEndpointDirect
 
@@ -110,12 +103,9 @@ var orgHookInfoShowCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
+
 		showWebhookInfoReq := apiClientOrgs.NewGetOrgWebhookInfoParams()
-		showWebhookInfoReq.OrgID = orgID
+		showWebhookInfoReq.OrgID = args[0]
 
 		response, err := apiCli.Organizations.GetOrgWebhookInfo(showWebhookInfoReq, authToken)
 		if err != nil {
@@ -142,15 +132,10 @@ var orgWebhookUninstallCmd = &cobra.Command{
 			return fmt.Errorf("too many arguments")
 		}
 
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
-
 		uninstallWebhookReq := apiClientOrgs.NewUninstallOrgWebhookParams()
-		uninstallWebhookReq.OrgID = orgID
+		uninstallWebhookReq.OrgID = args[0]
 
-		err = apiCli.Organizations.UninstallOrgWebhook(uninstallWebhookReq, authToken)
+		err := apiCli.Organizations.UninstallOrgWebhook(uninstallWebhookReq, authToken)
 		if err != nil {
 			return err
 		}
@@ -182,7 +167,6 @@ var orgAddCmd = &cobra.Command{
 			Name:             orgName,
 			WebhookSecret:    orgWebhookSecret,
 			CredentialsName:  orgCreds,
-			ForgeType:        params.EndpointType(forgeType),
 			PoolBalancerType: params.PoolBalancerType(poolBalancerType),
 		}
 		response, err := apiCli.Organizations.CreateOrg(newOrgReq, authToken)
@@ -229,19 +213,13 @@ var orgUpdateCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
-
 		updateOrgReq := apiClientOrgs.NewUpdateOrgParams()
 		updateOrgReq.Body = params.UpdateEntityParams{
 			WebhookSecret:    orgWebhookSecret,
 			CredentialsName:  orgCreds,
 			PoolBalancerType: params.PoolBalancerType(poolBalancerType),
 		}
-		updateOrgReq.OrgID = orgID
+		updateOrgReq.OrgID = args[0]
 		response, err := apiCli.Organizations.UpdateOrg(updateOrgReq, authToken)
 		if err != nil {
 			return err
@@ -263,8 +241,6 @@ var orgListCmd = &cobra.Command{
 		}
 
 		listOrgsReq := apiClientOrgs.NewListOrgsParams()
-		listOrgsReq.Name = &orgName
-		listOrgsReq.Endpoint = &orgEndpoint
 		response, err := apiCli.Organizations.ListOrgs(listOrgsReq, authToken)
 		if err != nil {
 			return err
@@ -289,14 +265,8 @@ var orgShowCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
-
 		showOrgReq := apiClientOrgs.NewGetOrgParams()
-		showOrgReq.OrgID = orgID
+		showOrgReq.OrgID = args[0]
 		response, err := apiCli.Organizations.GetOrg(showOrgReq, authToken)
 		if err != nil {
 			return err
@@ -322,14 +292,8 @@ var orgDeleteCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		orgID, err := resolveOrganization(args[0], orgEndpoint)
-		if err != nil {
-			return err
-		}
-
 		deleteOrgReq := apiClientOrgs.NewDeleteOrgParams()
-		deleteOrgReq.OrgID = orgID
+		deleteOrgReq.OrgID = args[0]
 		deleteOrgReq.KeepWebhook = &keepOrgWebhook
 		if err := apiCli.Organizations.DeleteOrg(deleteOrgReq, authToken); err != nil {
 			return err
@@ -342,37 +306,23 @@ func init() {
 	orgAddCmd.Flags().StringVar(&orgName, "name", "", "The name of the organization")
 	orgAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.")
 	orgAddCmd.Flags().StringVar(&orgWebhookSecret, "webhook-secret", "", "The webhook secret for this organization")
-	orgAddCmd.Flags().StringVar(&forgeType, "forge-type", "", "The forge type of the organization. Supported values: github, gitea.")
 	orgAddCmd.Flags().StringVar(&orgCreds, "credentials", "", "Credentials name. See credentials list.")
 	orgAddCmd.Flags().BoolVar(&orgRandomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this organization.")
 	orgAddCmd.Flags().BoolVar(&installOrgWebhook, "install-webhook", false, "Install the webhook as part of the add operation.")
 	orgAddCmd.MarkFlagsMutuallyExclusive("webhook-secret", "random-webhook-secret")
 	orgAddCmd.MarkFlagsOneRequired("webhook-secret", "random-webhook-secret")
-	orgListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.")
-	orgListCmd.Flags().StringVarP(&orgName, "name", "n", "", "Exact org name to filter by.")
-	orgListCmd.Flags().StringVarP(&orgEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.")
 
 	orgAddCmd.MarkFlagRequired("credentials") //nolint
 	orgAddCmd.MarkFlagRequired("name")        //nolint
 
 	orgDeleteCmd.Flags().BoolVar(&keepOrgWebhook, "keep-webhook", false, "Do not delete any existing webhook when removing the organization from GARM.")
-	orgDeleteCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
-
-	orgShowCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
 
 	orgUpdateCmd.Flags().StringVar(&orgWebhookSecret, "webhook-secret", "", "The webhook secret for this organization")
 	orgUpdateCmd.Flags().StringVar(&orgCreds, "credentials", "", "Credentials name. See credentials list.")
 	orgUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.")
-	orgUpdateCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
 
 	orgWebhookInstallCmd.Flags().BoolVar(&insecureOrgWebhook, "insecure", false, "Ignore self signed certificate errors.")
-	orgWebhookInstallCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
-
-	orgWebhookUninstallCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
-
-	orgHookInfoShowCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.")
-
 	orgWebhookCmd.AddCommand(
 		orgWebhookInstallCmd,
 		orgWebhookUninstallCmd,
@@ -397,17 +347,13 @@ func formatOrganizations(orgs []params.Organization) {
 		return
 	}
 	t := table.NewWriter()
-	header := table.Row{"ID", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Forge type", "Pool mgr running"}
+	header := table.Row{"ID", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Pool mgr running"}
 	if long {
 		header = append(header, "Created At", "Updated At")
 	}
 	t.AppendHeader(header)
 	for _, val := range orgs {
-		forgeType := val.Endpoint.EndpointType
-		if forgeType == "" {
-			forgeType = params.GithubEndpointType
-		}
-		row := table.Row{val.ID, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), forgeType, val.PoolManagerStatus.IsRunning}
+		row := table.Row{val.ID, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), val.PoolManagerStatus.IsRunning}
 		if long {
 			row = append(row, val.CreatedAt, val.UpdatedAt)
 		}
@@ -433,6 +379,8 @@ func formatOneOrganization(org params.Organization) {
 	t.AppendRow(table.Row{"Endpoint", org.Endpoint.Name})
 	t.AppendRow(table.Row{"Pool balancer type", org.GetBalancerType()})
 	t.AppendRow(table.Row{"Credentials", org.CredentialsName})
+	t.AppendRow(table.Row{"Created at", org.CreatedAt})
+	t.AppendRow(table.Row{"Updated at", org.UpdatedAt})
 	t.AppendRow(table.Row{"Pool manager running", org.PoolManagerStatus.IsRunning})
 	if !org.PoolManagerStatus.IsRunning {
 		t.AppendRow(table.Row{"Failure reason", org.PoolManagerStatus.FailureReason})
@@ -442,14 +390,9 @@ func formatOneOrganization(org params.Organization) {
 			t.AppendRow(table.Row{"Pools", pool.ID}, rowConfigAutoMerge)
 		}
 	}
-	if len(org.Events) > 0 {
-		for _, event := range org.Events {
-			t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), event.Message)}, rowConfigAutoMerge)
-		}
-	}
 	t.SetColumnConfigs([]table.ColumnConfig{
 		{Number: 1, AutoMerge: true},
-		{Number: 2, AutoMerge: false, WidthMax: 100},
+		{Number: 2, AutoMerge: false},
 	})
 
 	fmt.Println(t.Render())
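The organization formatters, like the other entity formatters in this patch, render with jedib0t/go-pretty and rely on AutoMerge to collapse repeated label cells. A compact, runnable example of that table API (the field names here are made up):

package main

import (
	"fmt"

	"github.com/jedib0t/go-pretty/v6/table"
)

func main() {
	t := table.NewWriter()
	t.AppendHeader(table.Row{"Field", "Value"})
	t.AppendRow(table.Row{"Name", "example-org"})
	t.AppendRow(table.Row{"Pools", "pool-1"})
	t.AppendRow(table.Row{"Pools", "pool-2"})

	// AutoMerge collapses the repeated "Pools" cells in column 1, the
	// same trick formatOneOrganization uses for its pool rows.
	t.SetColumnConfigs([]table.ColumnConfig{
		{Number: 1, AutoMerge: true},
		{Number: 2, AutoMerge: false},
	})
	fmt.Println(t.Render())
}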
diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go
index 5b8cadf3..a4eee742 100644
--- a/cmd/garm-cli/cmd/pool.go
+++ b/cmd/garm-cli/cmd/pool.go
@@ -21,6 +21,7 @@ import (
 	"strings"
 
 	"github.com/jedib0t/go-pretty/v6/table"
+	"github.com/pkg/errors"
 	"github.com/spf13/cobra"
 
 	commonParams "github.com/cloudbase/garm-provider-common/params"
@@ -104,32 +105,23 @@ Example:
 		switch len(args) {
 		case 0:
 			if cmd.Flags().Changed("repo") {
-				poolRepository, err = resolveRepository(poolRepository, endpointName)
-				if err != nil {
-					return err
-				}
 				listRepoPoolsReq := apiClientRepos.NewListRepoPoolsParams()
 				listRepoPoolsReq.RepoID = poolRepository
 				response, err = apiCli.Repositories.ListRepoPools(listRepoPoolsReq, authToken)
 			} else if cmd.Flags().Changed("org") {
-				poolOrganization, err = resolveOrganization(poolOrganization, endpointName)
-				if err != nil {
-					return err
-				}
 				listOrgPoolsReq := apiClientOrgs.NewListOrgPoolsParams()
 				listOrgPoolsReq.OrgID = poolOrganization
 				response, err = apiCli.Organizations.ListOrgPools(listOrgPoolsReq, authToken)
 			} else if cmd.Flags().Changed("enterprise") {
-				poolEnterprise, err = resolveEnterprise(poolEnterprise, endpointName)
-				if err != nil {
-					return err
-				}
 				listEnterprisePoolsReq := apiClientEnterprises.NewListEnterprisePoolsParams()
 				listEnterprisePoolsReq.EnterpriseID = poolEnterprise
 				response, err = apiCli.Enterprises.ListEnterprisePools(listEnterprisePoolsReq, authToken)
-			} else {
+			} else if cmd.Flags().Changed("all") {
 				listPoolsReq := apiClientPools.NewListPoolsParams()
 				response, err = apiCli.Pools.ListPools(listPoolsReq, authToken)
+			} else {
+				cmd.Help() //nolint
+				os.Exit(0)
 			}
 		default:
 			cmd.Help() //nolint
@@ -258,28 +250,16 @@ var poolAddCmd = &cobra.Command{
 		var err error
 		var response poolPayloadGetter
 		if cmd.Flags().Changed("repo") {
-			poolRepository, err = resolveRepository(poolRepository, endpointName)
-			if err != nil {
-				return err
-			}
 			newRepoPoolReq := apiClientRepos.NewCreateRepoPoolParams()
 			newRepoPoolReq.RepoID = poolRepository
 			newRepoPoolReq.Body = newPoolParams
 			response, err = apiCli.Repositories.CreateRepoPool(newRepoPoolReq, authToken)
 		} else if cmd.Flags().Changed("org") {
-			poolOrganization, err = resolveOrganization(poolOrganization, endpointName)
-			if err != nil {
-				return err
-			}
 			newOrgPoolReq := apiClientOrgs.NewCreateOrgPoolParams()
 			newOrgPoolReq.OrgID = poolOrganization
 			newOrgPoolReq.Body = newPoolParams
 			response, err = apiCli.Organizations.CreateOrgPool(newOrgPoolReq, authToken)
 		} else if cmd.Flags().Changed("enterprise") {
-			poolEnterprise, err = resolveEnterprise(poolEnterprise, endpointName)
-			if err != nil {
-				return err
-			}
 			newEnterprisePoolReq := apiClientEnterprises.NewCreateEnterprisePoolParams()
 			newEnterprisePoolReq.EnterpriseID = poolEnterprise
 			newEnterprisePoolReq.Body = newPoolParams
@@ -405,12 +385,9 @@ func init() {
 	poolListCmd.Flags().StringVarP(&poolRepository, "repo", "r", "", "List all pools within this repository.")
 	poolListCmd.Flags().StringVarP(&poolOrganization, "org", "o", "", "List all pools within this organization.")
 	poolListCmd.Flags().StringVarP(&poolEnterprise, "enterprise", "e", "", "List all pools within this enterprise.")
-	poolListCmd.Flags().BoolVarP(&poolAll, "all", "a", true, "List all pools, regardless of org or repo.")
+	poolListCmd.Flags().BoolVarP(&poolAll, "all", "a", false, "List all pools, regardless of org or repo.")
 	poolListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.")
-	poolListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.")
-
-	poolListCmd.Flags().MarkDeprecated("all", "all pools are listed by default in the absence of --repo, --org or --enterprise.")
-	poolListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise", "all")
+	poolListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise")
 
 	poolUpdateCmd.Flags().StringVar(&poolImage, "image", "", "The provider-specific image name to use for runners in this pool.")
 	poolUpdateCmd.Flags().UintVar(&priority, "priority", 0, "When multiple pools match the same labels, priority dictates the order by which they are returned, in descending order.")
@@ -443,8 +420,6 @@ func init() {
 	poolAddCmd.Flags().UintVar(&poolRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join Github.")
 	poolAddCmd.Flags().UintVar(&poolMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.")
 	poolAddCmd.Flags().BoolVar(&poolEnabled, "enabled", false, "Enable this pool.")
-	poolAddCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.")
-
 	poolAddCmd.MarkFlagRequired("provider-name") //nolint
 	poolAddCmd.MarkFlagRequired("image")         //nolint
 	poolAddCmd.MarkFlagRequired("flavor")        //nolint
@@ -470,7 +445,7 @@ func init() {
 func extraSpecsFromFile(specsFile string) (json.RawMessage, error) {
 	data, err := os.ReadFile(specsFile)
 	if err != nil {
-		return nil, fmt.Errorf("error opening specs file: %w", err)
+		return nil, errors.Wrap(err, "opening specs file")
 	}
 	return asRawMessage(data)
 }
@@ -480,14 +455,14 @@ func asRawMessage(data []byte) (json.RawMessage, error) {
 	// have a valid json.
 	var unmarshaled interface{}
 	if err := json.Unmarshal(data, &unmarshaled); err != nil {
-		return nil, fmt.Errorf("error decoding extra specs: %w", err)
+		return nil, errors.Wrap(err, "decoding extra specs")
 	}
 
 	var asRawJSON json.RawMessage
 	var err error
 	asRawJSON, err = json.Marshal(unmarshaled)
 	if err != nil {
-		return nil, fmt.Errorf("error marshaling json: %w", err)
+		return nil, errors.Wrap(err, "marshaling json")
 	}
 	return asRawJSON, nil
 }
@@ -501,7 +476,7 @@ func formatPools(pools []params.Pool) {
 	t.SetColumnConfigs([]table.ColumnConfig{
 		{Number: 2, WidthMax: 40},
 	})
-	header := table.Row{"ID", "Image", "Flavor", "Tags", "Belongs to", "Endpoint", "Forge Type", "Enabled"}
+	header := table.Row{"ID", "Image", "Flavor", "Tags", "Belongs to", "Enabled"}
 	if long {
 		header = append(header, "Level", "Created At", "Updated at", "Runner Prefix", "Priority")
 	}
@@ -518,15 +493,15 @@ func formatPools(pools []params.Pool) {
 		switch {
 		case pool.RepoID != "" && pool.RepoName != "":
 			belongsTo = pool.RepoName
-			level = entityTypeRepo
+			level = "repo"
 		case pool.OrgID != "" && pool.OrgName != "":
 			belongsTo = pool.OrgName
-			level = entityTypeOrg
+			level = "org"
 		case pool.EnterpriseID != "" && pool.EnterpriseName != "":
 			belongsTo = pool.EnterpriseName
-			level = entityTypeEnterprise
+			level = "enterprise"
 		}
-		row := table.Row{pool.ID, pool.Image, pool.Flavor, strings.Join(tags, " "), belongsTo, pool.Endpoint.Name, pool.Endpoint.EndpointType, pool.Enabled}
+		row := table.Row{pool.ID, pool.Image, pool.Flavor, strings.Join(tags, " "), belongsTo, pool.Enabled}
 		if long {
 			row = append(row, level, pool.CreatedAt, pool.UpdatedAt, pool.GetRunnerPrefix(), pool.Priority)
 		}
@@ -557,13 +532,13 @@ func formatOnePool(pool params.Pool) {
 	switch {
 	case pool.RepoID != "" && pool.RepoName != "":
 		belongsTo = pool.RepoName
-		level = entityTypeRepo
+		level = "repo"
 	case pool.OrgID != "" && pool.OrgName != "":
 		belongsTo = pool.OrgName
-		level = entityTypeOrg
+		level = "org"
 	case pool.EnterpriseID != "" && pool.EnterpriseName != "":
 		belongsTo = pool.EnterpriseName
-		level = entityTypeEnterprise
+		level = "enterprise"
 	}
 
 	t.AppendHeader(header)
@@ -586,8 +561,6 @@ func formatOnePool(pool params.Pool) {
 	t.AppendRow(table.Row{"Runner Prefix", pool.GetRunnerPrefix()})
 	t.AppendRow(table.Row{"Extra specs", string(pool.ExtraSpecs)})
 	t.AppendRow(table.Row{"GitHub Runner Group", pool.GitHubRunnerGroup})
-	t.AppendRow(table.Row{"Forge Type", pool.Endpoint.EndpointType})
-	t.AppendRow(table.Row{"Endpoint Name", pool.Endpoint.Name})
 
 	if len(pool.Instances) > 0 {
 		for _, instance := range pool.Instances {
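extraSpecsFromFile and asRawMessage above validate user-supplied JSON by unmarshaling into interface{} and re-marshaling, which rejects invalid input and normalizes the payload. The same round-trip in isolation, mirroring the shown helper with the same pkg/errors wrapping:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/pkg/errors"
)

// asRawMessage parses the bytes to prove they are valid JSON, then
// re-marshals them into a json.RawMessage, as in pool.go above.
func asRawMessage(data []byte) (json.RawMessage, error) {
	var unmarshaled interface{}
	if err := json.Unmarshal(data, &unmarshaled); err != nil {
		return nil, errors.Wrap(err, "decoding extra specs")
	}
	out, err := json.Marshal(unmarshaled)
	if err != nil {
		return nil, errors.Wrap(err, "marshaling json")
	}
	return json.RawMessage(out), nil
}

func main() {
	ok, err := asRawMessage([]byte(`{"disk_size": 50}`))
	fmt.Println(string(ok), err) // {"disk_size":50} <nil>

	_, err = asRawMessage([]byte(`not json`))
	fmt.Println(err) // decoding extra specs: invalid character ...
}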
diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go
index cca1a7fe..1c453836 100644
--- a/cmd/garm-cli/cmd/repository.go
+++ b/cmd/garm-cli/cmd/repository.go
@@ -16,7 +16,6 @@ package cmd
 
 import (
 	"fmt"
-	"strings"
 
 	"github.com/jedib0t/go-pretty/v6/table"
 	"github.com/spf13/cobra"
@@ -30,10 +29,8 @@ import (
 var (
 	repoOwner           string
 	repoName            string
-	repoEndpoint        string
 	repoWebhookSecret   string
 	repoCreds           string
-	forgeType           string
 	randomWebhookSecret bool
 	insecureRepoWebhook bool
 	keepRepoWebhook     bool
@@ -78,13 +75,8 @@ var repoWebhookInstallCmd = &cobra.Command{
 			return fmt.Errorf("too many arguments")
 		}
 
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		installWebhookReq := apiClientRepos.NewInstallRepoWebhookParams()
-		installWebhookReq.RepoID = repoID
+		installWebhookReq.RepoID = args[0]
 		installWebhookReq.Body.InsecureSSL = insecureRepoWebhook
 		installWebhookReq.Body.WebhookEndpointType = params.WebhookEndpointDirect
 
@@ -113,13 +105,8 @@ var repoHookInfoShowCmd = &cobra.Command{
 			return fmt.Errorf("too many arguments")
 		}
 
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		showWebhookInfoReq := apiClientRepos.NewGetRepoWebhookInfoParams()
-		showWebhookInfoReq.RepoID = repoID
+		showWebhookInfoReq.RepoID = args[0]
 
 		response, err := apiCli.Repositories.GetRepoWebhookInfo(showWebhookInfoReq, authToken)
 		if err != nil {
@@ -146,15 +133,10 @@ var repoWebhookUninstallCmd = &cobra.Command{
 			return fmt.Errorf("too many arguments")
 		}
 
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		uninstallWebhookReq := apiClientRepos.NewUninstallRepoWebhookParams()
-		uninstallWebhookReq.RepoID = repoID
+		uninstallWebhookReq.RepoID = args[0]
 
-		err = apiCli.Repositories.UninstallRepoWebhook(uninstallWebhookReq, authToken)
+		err := apiCli.Repositories.UninstallRepoWebhook(uninstallWebhookReq, authToken)
 		if err != nil {
 			return err
 		}
@@ -187,7 +169,6 @@ var repoAddCmd = &cobra.Command{
 			Name:             repoName,
 			WebhookSecret:    repoWebhookSecret,
 			CredentialsName:  repoCreds,
-			ForgeType:        params.EndpointType(forgeType),
 			PoolBalancerType: params.PoolBalancerType(poolBalancerType),
 		}
 		response, err := apiCli.Repositories.CreateRepo(newRepoReq, authToken)
@@ -229,9 +210,6 @@ var repoListCmd = &cobra.Command{
 		}
 
 		listReposReq := apiClientRepos.NewListReposParams()
-		listReposReq.Name = &repoName
-		listReposReq.Owner = &repoOwner
-		listReposReq.Endpoint = &repoEndpoint
 		response, err := apiCli.Repositories.ListRepos(listReposReq, authToken)
 		if err != nil {
 			return err
@@ -258,19 +236,13 @@ var repoUpdateCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		updateReposReq := apiClientRepos.NewUpdateRepoParams()
 		updateReposReq.Body = params.UpdateEntityParams{
 			WebhookSecret:    repoWebhookSecret,
 			CredentialsName:  repoCreds,
 			PoolBalancerType: params.PoolBalancerType(poolBalancerType),
 		}
-		updateReposReq.RepoID = repoID
+		updateReposReq.RepoID = args[0]
 
 		response, err := apiCli.Repositories.UpdateRepo(updateReposReq, authToken)
 		if err != nil {
@@ -296,14 +268,8 @@ var repoShowCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		showRepoReq := apiClientRepos.NewGetRepoParams()
-		showRepoReq.RepoID = repoID
+		showRepoReq.RepoID = args[0]
 		response, err := apiCli.Repositories.GetRepo(showRepoReq, authToken)
 		if err != nil {
 			return err
@@ -329,14 +295,8 @@ var repoDeleteCmd = &cobra.Command{
 		if len(args) > 1 {
 			return fmt.Errorf("too many arguments")
 		}
-
-		repoID, err := resolveRepository(args[0], repoEndpoint)
-		if err != nil {
-			return err
-		}
-
 		deleteRepoReq := apiClientRepos.NewDeleteRepoParams()
-		deleteRepoReq.RepoID = repoID
+		deleteRepoReq.RepoID = args[0]
 		deleteRepoReq.KeepWebhook = &keepRepoWebhook
 		if err := apiCli.Repositories.DeleteRepo(deleteRepoReq, authToken); err != nil {
 			return err
@@ -349,7 +309,6 @@ func init() {
 	repoAddCmd.Flags().StringVar(&repoOwner, "owner", "", "The owner of this repository")
 	repoAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.")
 	repoAddCmd.Flags().StringVar(&repoName, "name", "", "The name of the repository")
-	repoAddCmd.Flags().StringVar(&forgeType, "forge-type", "", "The forge type of the repository. Supported values: github, gitea.")
 	repoAddCmd.Flags().StringVar(&repoWebhookSecret, "webhook-secret", "", "The webhook secret for this repository")
 	repoAddCmd.Flags().StringVar(&repoCreds, "credentials", "", "Credentials name. See credentials list.")
 	repoAddCmd.Flags().BoolVar(&randomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this repository.")
@@ -358,30 +317,18 @@ func init() {
 	repoAddCmd.MarkFlagsOneRequired("webhook-secret", "random-webhook-secret")
 
 	repoListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.")
-	repoListCmd.Flags().StringVarP(&repoName, "name", "n", "", "Exact repo name to filter by.")
-	repoListCmd.Flags().StringVarP(&repoOwner, "owner", "o", "", "Exact repo owner to filter by.")
-	repoListCmd.Flags().StringVarP(&repoEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.")
 
 	repoAddCmd.MarkFlagRequired("credentials") //nolint
 	repoAddCmd.MarkFlagRequired("owner")       //nolint
 	repoAddCmd.MarkFlagRequired("name")        //nolint
 
 	repoDeleteCmd.Flags().BoolVar(&keepRepoWebhook, "keep-webhook", false, "Do not delete any existing webhook when removing the repo from GARM.")
-	repoDeleteCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
-
-	repoShowCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
 
 	repoUpdateCmd.Flags().StringVar(&repoWebhookSecret, "webhook-secret", "", "The webhook secret for this repository. If you update this secret, you will have to manually update the secret in GitHub as well.")
 	repoUpdateCmd.Flags().StringVar(&repoCreds, "credentials", "", "Credentials name. See credentials list.")
 	repoUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.")
-	repoUpdateCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
 
 	repoWebhookInstallCmd.Flags().BoolVar(&insecureRepoWebhook, "insecure", false, "Ignore self signed certificate errors.")
-	repoWebhookInstallCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
-
-	repoWebhookUninstallCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
-
-	repoHookInfoShowCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.")
-
 	repoWebhookCmd.AddCommand(
 		repoWebhookInstallCmd,
@@ -407,17 +354,13 @@ func formatRepositories(repos []params.Repository) {
 		return
 	}
 	t := table.NewWriter()
-	header := table.Row{"ID", "Owner", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Forge type", "Pool mgr running"}
+	header := table.Row{"ID", "Owner", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Pool mgr running"}
 	if long {
 		header = append(header, "Created At", "Updated At")
 	}
 	t.AppendHeader(header)
 	for _, val := range repos {
-		forgeType := val.Endpoint.EndpointType
-		if forgeType == "" {
-			forgeType = params.GithubEndpointType
-		}
-		row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.GetCredentialsName(), val.GetBalancerType(), forgeType, val.PoolManagerStatus.IsRunning}
+		row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), val.PoolManagerStatus.IsRunning}
 		if long {
 			row = append(row, val.CreatedAt, val.UpdatedAt)
 		}
@@ -443,7 +386,7 @@ func formatOneRepository(repo params.Repository) {
 	t.AppendRow(table.Row{"Name", repo.Name})
 	t.AppendRow(table.Row{"Endpoint", repo.Endpoint.Name})
 	t.AppendRow(table.Row{"Pool balancer type", repo.GetBalancerType()})
-	t.AppendRow(table.Row{"Credentials", repo.GetCredentialsName()})
+	t.AppendRow(table.Row{"Credentials", repo.CredentialsName})
 	t.AppendRow(table.Row{"Pool manager running", repo.PoolManagerStatus.IsRunning})
 	if !repo.PoolManagerStatus.IsRunning {
 		t.AppendRow(table.Row{"Failure reason", repo.PoolManagerStatus.FailureReason})
@@ -454,16 +397,9 @@ func formatOneRepository(repo params.Repository) {
 			t.AppendRow(table.Row{"Pools", pool.ID}, rowConfigAutoMerge)
 		}
 	}
-
-	if len(repo.Events) > 0 {
-		for _, event := range repo.Events {
-			t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), event.Message)}, rowConfigAutoMerge)
-		}
-	}
-
 	t.SetColumnConfigs([]table.ColumnConfig{
 		{Number: 1, AutoMerge: true},
-		{Number: 2, AutoMerge: false, WidthMax: 100},
+		{Number: 2, AutoMerge: false},
 	})
 
 	fmt.Println(t.Render())
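repoAddCmd, like orgAddCmd earlier, pairs --webhook-secret with --random-webhook-secret through cobra's flag groups: the two are mutually exclusive, and one of them must be given. A minimal sketch of that combination (the flag names match the patch; the command itself is illustrative):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

var (
	webhookSecret string
	randomSecret  bool
)

var addCmd = &cobra.Command{
	Use:          "add",
	SilenceUsage: true,
	RunE: func(_ *cobra.Command, _ []string) error {
		fmt.Println("secret:", webhookSecret, "random:", randomSecret)
		return nil
	},
}

func main() {
	addCmd.Flags().StringVar(&webhookSecret, "webhook-secret", "", "Explicit webhook secret.")
	addCmd.Flags().BoolVar(&randomSecret, "random-webhook-secret", false, "Generate a random secret.")

	// Exactly one of the two flags must be provided; both together or
	// neither is rejected before RunE ever runs.
	addCmd.MarkFlagsMutuallyExclusive("webhook-secret", "random-webhook-secret")
	addCmd.MarkFlagsOneRequired("webhook-secret", "random-webhook-secret")

	addCmd.SetArgs([]string{"--random-webhook-secret"})
	_ = addCmd.Execute()
}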
"enterprise" -) - var ( cfg *config.Config mgr config.Manager diff --git a/cmd/garm-cli/cmd/runner.go b/cmd/garm-cli/cmd/runner.go index 44a7b8df..08b9a6db 100644 --- a/cmd/garm-cli/cmd/runner.go +++ b/cmd/garm-cli/cmd/runner.go @@ -104,32 +104,23 @@ Example: response, err = apiCli.Instances.ListPoolInstances(listPoolInstancesReq, authToken) case 0: if cmd.Flags().Changed("repo") { - runnerRepo, resErr := resolveRepository(runnerRepository, endpointName) - if resErr != nil { - return resErr - } listRepoInstancesReq := apiClientRepos.NewListRepoInstancesParams() - listRepoInstancesReq.RepoID = runnerRepo + listRepoInstancesReq.RepoID = runnerRepository response, err = apiCli.Repositories.ListRepoInstances(listRepoInstancesReq, authToken) } else if cmd.Flags().Changed("org") { - runnerOrg, resErr := resolveOrganization(runnerOrganization, endpointName) - if resErr != nil { - return resErr - } listOrgInstancesReq := apiClientOrgs.NewListOrgInstancesParams() - listOrgInstancesReq.OrgID = runnerOrg + listOrgInstancesReq.OrgID = runnerOrganization response, err = apiCli.Organizations.ListOrgInstances(listOrgInstancesReq, authToken) } else if cmd.Flags().Changed("enterprise") { - runnerEnt, resErr := resolveEnterprise(runnerEnterprise, endpointName) - if resErr != nil { - return resErr - } listEnterpriseInstancesReq := apiClientEnterprises.NewListEnterpriseInstancesParams() - listEnterpriseInstancesReq.EnterpriseID = runnerEnt + listEnterpriseInstancesReq.EnterpriseID = runnerEnterprise response, err = apiCli.Enterprises.ListEnterpriseInstances(listEnterpriseInstancesReq, authToken) - } else { + } else if cmd.Flags().Changed("all") { listInstancesReq := apiClientInstances.NewListInstancesParams() response, err = apiCli.Instances.ListInstances(listInstancesReq, authToken) + } else { + cmd.Help() //nolint + os.Exit(0) } default: cmd.Help() //nolint @@ -214,12 +205,9 @@ func init() { runnerListCmd.Flags().StringVarP(&runnerRepository, "repo", "r", "", "List all runners from all pools within this repository.") runnerListCmd.Flags().StringVarP(&runnerOrganization, "org", "o", "", "List all runners from all pools within this organization.") runnerListCmd.Flags().StringVarP(&runnerEnterprise, "enterprise", "e", "", "List all runners from all pools within this enterprise.") - runnerListCmd.Flags().BoolVarP(&runnerAll, "all", "a", true, "List all runners, regardless of org or repo. (deprecated)") + runnerListCmd.Flags().BoolVarP(&runnerAll, "all", "a", false, "List all runners, regardless of org or repo.") runnerListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") runnerListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise", "all") - runnerListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") - - runnerListCmd.Flags().MarkDeprecated("all", "all runners are listed by default in the absence of --repo, --org or --enterprise.") runnerDeleteCmd.Flags().BoolVarP(&forceRemove, "force-remove-runner", "f", false, "Forcefully remove a runner. If set to true, GARM will ignore provider errors when removing the runner.") runnerDeleteCmd.Flags().BoolVarP(&bypassGHUnauthorized, "bypass-github-unauthorized", "b", false, "Ignore Unauthorized errors from GitHub and proceed with removing runner from provider and DB. This is useful when credentials are no longer valid and you want to remove your runners. Warning, this has the potential to leave orphaned runners in GitHub. 
You will need to update your credentials to properly consolidate.") @@ -240,14 +228,14 @@ func formatInstances(param []params.Instance, detailed bool) { return } t := table.NewWriter() - header := table.Row{"Nr", "Name", "Status", "Runner Status", "Pool ID", "Scalse Set ID"} + header := table.Row{"Nr", "Name", "Status", "Runner Status", "Pool ID"} if detailed { header = append(header, "Created At", "Updated At", "Job Name", "Started At", "Run ID", "Repository") } t.AppendHeader(header) for idx, inst := range param { - row := table.Row{idx + 1, inst.Name, inst.Status, inst.RunnerStatus, inst.PoolID, inst.ScaleSetID} + row := table.Row{idx + 1, inst.Name, inst.Status, inst.RunnerStatus, inst.PoolID} if detailed { row = append(row, inst.CreatedAt, inst.UpdatedAt) if inst.Job != nil { @@ -282,11 +270,7 @@ func formatSingleInstance(instance params.Instance) { t.AppendRow(table.Row{"OS Version", instance.OSVersion}, table.RowConfig{AutoMerge: false}) t.AppendRow(table.Row{"Status", instance.Status}, table.RowConfig{AutoMerge: false}) t.AppendRow(table.Row{"Runner Status", instance.RunnerStatus}, table.RowConfig{AutoMerge: false}) - if instance.PoolID != "" { - t.AppendRow(table.Row{"Pool ID", instance.PoolID}, table.RowConfig{AutoMerge: false}) - } else if instance.ScaleSetID != 0 { - t.AppendRow(table.Row{"Scale Set ID", instance.ScaleSetID}, table.RowConfig{AutoMerge: false}) - } + t.AppendRow(table.Row{"Pool ID", instance.PoolID}, table.RowConfig{AutoMerge: false}) if len(instance.Addresses) > 0 { for _, addr := range instance.Addresses { diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go deleted file mode 100644 index a78fe33f..00000000 --- a/cmd/garm-cli/cmd/scalesets.go +++ /dev/null @@ -1,539 +0,0 @@ -// Copyright 2022 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go
deleted file mode 100644
index a78fe33f..00000000
--- a/cmd/garm-cli/cmd/scalesets.go
+++ /dev/null
@@ -1,539 +0,0 @@
-// Copyright 2022 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package cmd
-
-import (
-	"fmt"
-	"os"
-
-	"github.com/jedib0t/go-pretty/v6/table"
-	"github.com/spf13/cobra"
-
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	apiClientEnterprises "github.com/cloudbase/garm/client/enterprises"
-	apiClientOrgs "github.com/cloudbase/garm/client/organizations"
-	apiClientRepos "github.com/cloudbase/garm/client/repositories"
-	apiClientScaleSets "github.com/cloudbase/garm/client/scalesets"
-	"github.com/cloudbase/garm/cmd/garm-cli/common"
-	"github.com/cloudbase/garm/params"
-)
-
-var (
-	scalesetProvider               string
-	scalesetMaxRunners             uint
-	scalesetMinIdleRunners         uint
-	scalesetRunnerPrefix           string
-	scalesetName                   string
-	scalesetImage                  string
-	scalesetFlavor                 string
-	scalesetOSType                 string
-	scalesetOSArch                 string
-	scalesetEnabled                bool
-	scalesetRunnerBootstrapTimeout uint
-	scalesetRepository             string
-	scalesetOrganization           string
-	scalesetEnterprise             string
-	scalesetExtraSpecsFile         string
-	scalesetExtraSpecs             string
-	scalesetGitHubRunnerGroup      string
-)
-
-type scalesetPayloadGetter interface {
-	GetPayload() params.ScaleSet
-}
-
-type scalesetsPayloadGetter interface {
-	GetPayload() params.ScaleSets
-}
-
-// scalesetCmd represents the scale set command
-var scalesetCmd = &cobra.Command{
-	Use:          "scaleset",
-	SilenceUsage: true,
-	Short:        "List scale sets",
-	Long:         `Query information or perform operations on scale sets.`,
-	Run:          nil,
-}
-
-var scalesetListCmd = &cobra.Command{
-	Use:          "list",
-	Aliases:      []string{"ls"},
-	Short:        "List scale sets",
-	Long: `List scale sets of repositories, orgs or all of the above.
-
-This command will list scale sets from one repo, one org or all scale sets
-on the system. The list flags are mutually exclusive. You must however
-specify one of them.
-
-Example:
-
-	List scalesets from one repo:
-	garm-cli scaleset list --repo=05e7eac6-4705-486d-89c9-0170bbb576af
-
-	List scalesets from one org:
-	garm-cli scaleset list --org=5493e51f-3170-4ce3-9f05-3fe690fc6ec6
-
-	List scalesets from one enterprise:
-	garm-cli scaleset list --enterprise=a8ee4c66-e762-4cbe-a35d-175dba2c9e62
-
-	List all scalesets from all repos, orgs and enterprises:
-	garm-cli scaleset list --all
-
-`,
-	SilenceUsage: true,
-	RunE: func(cmd *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		var response scalesetsPayloadGetter
-		var err error
-
-		switch len(args) {
-		case 0:
-			if cmd.Flags().Changed("repo") {
-				scalesetRepository, err = resolveRepository(scalesetRepository, endpointName)
-				if err != nil {
-					return err
-				}
-				listRepoScaleSetsReq := apiClientRepos.NewListRepoScaleSetsParams()
-				listRepoScaleSetsReq.RepoID = scalesetRepository
-				response, err = apiCli.Repositories.ListRepoScaleSets(listRepoScaleSetsReq, authToken)
-			} else if cmd.Flags().Changed("org") {
-				scalesetOrganization, err = resolveOrganization(scalesetOrganization, endpointName)
-				if err != nil {
-					return err
-				}
-				listOrgScaleSetsReq := apiClientOrgs.NewListOrgScaleSetsParams()
-				listOrgScaleSetsReq.OrgID = scalesetOrganization
-				response, err = apiCli.Organizations.ListOrgScaleSets(listOrgScaleSetsReq, authToken)
-			} else if cmd.Flags().Changed("enterprise") {
-				scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise, endpointName)
-				if err != nil {
-					return err
-				}
-				listEnterpriseScaleSetsReq := apiClientEnterprises.NewListEnterpriseScaleSetsParams()
-				listEnterpriseScaleSetsReq.EnterpriseID = scalesetEnterprise
-				response, err = apiCli.Enterprises.ListEnterpriseScaleSets(listEnterpriseScaleSetsReq, authToken)
-			} else {
-				listScaleSetsReq := apiClientScaleSets.NewListScalesetsParams()
-				response, err = apiCli.Scalesets.ListScalesets(listScaleSetsReq, authToken)
-			}
-		default:
-			cmd.Help() //nolint
-			os.Exit(0)
-		}
-
-		if err != nil {
-			return err
-		}
-		formatScaleSets(response.GetPayload())
-		return nil
-	},
-}
-
-var scaleSetShowCmd = &cobra.Command{
-	Use:          "show",
-	Short:        "Show details for a scale set",
-	Long:         `Displays a detailed view of a single scale set.`,
-	SilenceUsage: true,
-	RunE: func(_ *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		if len(args) == 0 {
-			return fmt.Errorf("requires a scale set ID")
-		}
-
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		getScaleSetReq := apiClientScaleSets.NewGetScaleSetParams()
-		getScaleSetReq.ScalesetID = args[0]
-		response, err := apiCli.Scalesets.GetScaleSet(getScaleSetReq, authToken)
-		if err != nil {
-			return err
-		}
-		formatOneScaleSet(response.Payload)
-		return nil
-	},
-}
-
-var scaleSetDeleteCmd = &cobra.Command{
-	Use:          "delete",
-	Aliases:      []string{"remove", "rm", "del"},
-	Short:        "Delete scale set by ID",
-	Long:         `Delete one scale set by referencing it's ID, regardless of repo or org.`,
-	SilenceUsage: true,
-	RunE: func(_ *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		if len(args) == 0 {
-			return fmt.Errorf("requires a scale set ID")
-		}
-
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		deleteScaleSetReq := apiClientScaleSets.NewDeleteScaleSetParams()
-		deleteScaleSetReq.ScalesetID = args[0]
-		if err := apiCli.Scalesets.DeleteScaleSet(deleteScaleSetReq, authToken); err != nil {
-			return err
-		}
-		return nil
-	},
-}
-
-var scaleSetAddCmd = &cobra.Command{
-	Use:          "add",
-	Aliases:      []string{"create"},
-	Short:        "Add scale set",
-	Long:         `Add a new scale set.`,
-	SilenceUsage: true,
-	RunE: func(cmd *cobra.Command, _ []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		newScaleSetParams := params.CreateScaleSetParams{
-			RunnerPrefix: params.RunnerPrefix{
-				Prefix: scalesetRunnerPrefix,
-			},
-			ProviderName:           scalesetProvider,
-			Name:                   scalesetName,
-			MaxRunners:             scalesetMaxRunners,
-			MinIdleRunners:         scalesetMinIdleRunners,
-			Image:                  scalesetImage,
-			Flavor:                 scalesetFlavor,
-			OSType:                 commonParams.OSType(scalesetOSType),
-			OSArch:                 commonParams.OSArch(scalesetOSArch),
-			Enabled:                scalesetEnabled,
-			RunnerBootstrapTimeout: scalesetRunnerBootstrapTimeout,
-			GitHubRunnerGroup:      scalesetGitHubRunnerGroup,
-		}
-
-		if cmd.Flags().Changed("extra-specs") {
-			data, err := asRawMessage([]byte(scalesetExtraSpecs))
-			if err != nil {
-				return err
-			}
-			newScaleSetParams.ExtraSpecs = data
-		}
-
-		if scalesetExtraSpecsFile != "" {
-			data, err := extraSpecsFromFile(scalesetExtraSpecsFile)
-			if err != nil {
-				return err
-			}
-			newScaleSetParams.ExtraSpecs = data
-		}
-
-		if err := newScaleSetParams.Validate(); err != nil {
-			return err
-		}
-
-		var err error
-		var response scalesetPayloadGetter
-		if cmd.Flags().Changed("repo") {
-			scalesetRepository, err = resolveRepository(scalesetRepository, endpointName)
-			if err != nil {
-				return err
-			}
-			newRepoScaleSetReq := apiClientRepos.NewCreateRepoScaleSetParams()
-			newRepoScaleSetReq.RepoID = scalesetRepository
-			newRepoScaleSetReq.Body = newScaleSetParams
-			response, err = apiCli.Repositories.CreateRepoScaleSet(newRepoScaleSetReq, authToken)
-		} else if cmd.Flags().Changed("org") {
-			scalesetOrganization, err = resolveOrganization(scalesetOrganization, endpointName)
-			if err != nil {
-				return err
-			}
-			newOrgScaleSetReq := apiClientOrgs.NewCreateOrgScaleSetParams()
-			newOrgScaleSetReq.OrgID = scalesetOrganization
-			newOrgScaleSetReq.Body = newScaleSetParams
-			response, err = apiCli.Organizations.CreateOrgScaleSet(newOrgScaleSetReq, authToken)
-		} else if cmd.Flags().Changed("enterprise") {
-			scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise, endpointName)
-			if err != nil {
-				return err
-			}
-			newEnterpriseScaleSetReq := apiClientEnterprises.NewCreateEnterpriseScaleSetParams()
-			newEnterpriseScaleSetReq.EnterpriseID = scalesetEnterprise
-			newEnterpriseScaleSetReq.Body = newScaleSetParams
-			response, err = apiCli.Enterprises.CreateEnterpriseScaleSet(newEnterpriseScaleSetReq, authToken)
-		} else {
-			cmd.Help() //nolint
-			os.Exit(0)
-		}
-
-		if err != nil {
-			return err
-		}
-
-		formatOneScaleSet(response.GetPayload())
-		return nil
-	},
-}
-
-var scaleSetUpdateCmd = &cobra.Command{
-	Use:   "update",
-	Short: "Update one scale set",
-	Long: `Updates scale set characteristics.
-
-This command updates the scale set characteristics. Runners already created prior to updating
-the scale set, will not be recreated. If they no longer suit your needs, you will need to
-explicitly remove them using the runner delete command.
-	`,
-	SilenceUsage: true,
-	RunE: func(cmd *cobra.Command, args []string) error {
-		if needsInit {
-			return errNeedsInitError
-		}
-
-		if len(args) == 0 {
-			return fmt.Errorf("command requires a scale set ID")
-		}
-
-		if len(args) > 1 {
-			return fmt.Errorf("too many arguments")
-		}
-
-		updateScaleSetReq := apiClientScaleSets.NewUpdateScaleSetParams()
-		scaleSetUpdateParams := params.UpdateScaleSetParams{}
-
-		if cmd.Flags().Changed("image") {
-			scaleSetUpdateParams.Image = scalesetImage
-		}
-
-		if cmd.Flags().Changed("name") {
-			scaleSetUpdateParams.Name = scalesetName
-		}
-
-		if cmd.Flags().Changed("flavor") {
-			scaleSetUpdateParams.Flavor = scalesetFlavor
-		}
-
-		if cmd.Flags().Changed("os-type") {
-			scaleSetUpdateParams.OSType = commonParams.OSType(scalesetOSType)
-		}
-
-		if cmd.Flags().Changed("os-arch") {
-			scaleSetUpdateParams.OSArch = commonParams.OSArch(scalesetOSArch)
-		}
-
-		if cmd.Flags().Changed("max-runners") {
-			scaleSetUpdateParams.MaxRunners = &scalesetMaxRunners
-		}
-
-		if cmd.Flags().Changed("min-idle-runners") {
-			scaleSetUpdateParams.MinIdleRunners = &scalesetMinIdleRunners
-		}
-
-		if cmd.Flags().Changed("runner-prefix") {
-			scaleSetUpdateParams.RunnerPrefix = params.RunnerPrefix{
-				Prefix: scalesetRunnerPrefix,
-			}
-		}
-
-		if cmd.Flags().Changed("runner-group") {
-			scaleSetUpdateParams.GitHubRunnerGroup = &scalesetGitHubRunnerGroup
-		}
-
-		if cmd.Flags().Changed("enabled") {
-			scaleSetUpdateParams.Enabled = &scalesetEnabled
-		}
-
-		if cmd.Flags().Changed("runner-bootstrap-timeout") {
-			scaleSetUpdateParams.RunnerBootstrapTimeout = &scalesetRunnerBootstrapTimeout
-		}
-
-		if cmd.Flags().Changed("extra-specs") {
-			data, err := asRawMessage([]byte(scalesetExtraSpecs))
-			if err != nil {
-				return err
-			}
-			scaleSetUpdateParams.ExtraSpecs = data
-		}
-
-		if scalesetExtraSpecsFile != "" {
-			data, err := extraSpecsFromFile(scalesetExtraSpecsFile)
-			if err != nil {
-				return err
-			}
-			scaleSetUpdateParams.ExtraSpecs = data
-		}
-
-		updateScaleSetReq.ScalesetID = args[0]
-		updateScaleSetReq.Body = scaleSetUpdateParams
-		response, err := apiCli.Scalesets.UpdateScaleSet(updateScaleSetReq, authToken)
-		if err != nil {
-			return err
-		}
-
-		formatOneScaleSet(response.Payload)
-		return nil
-	},
-}
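scaleSetUpdateCmd above builds a partial update: a field is sent only when cmd.Flags().Changed reports its flag was explicitly set, and pointer fields distinguish "unset" from a zero value. The pattern reduced to essentials (UpdateParams is a simplified stand-in for params.UpdateScaleSetParams):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// UpdateParams uses pointer fields so an omitted flag is encoded as
// nil rather than as a zero value that would overwrite stored state.
type UpdateParams struct {
	MaxRunners *uint
	Enabled    *bool
}

var (
	maxRunners uint
	enabled    bool
)

var updateCmd = &cobra.Command{
	Use:          "update",
	SilenceUsage: true,
	RunE: func(cmd *cobra.Command, _ []string) error {
		update := UpdateParams{}
		// Only fields whose flags were explicitly passed are sent, so
		// an omitted --enabled never disables the scale set by accident.
		if cmd.Flags().Changed("max-runners") {
			update.MaxRunners = &maxRunners
		}
		if cmd.Flags().Changed("enabled") {
			update.Enabled = &enabled
		}
		fmt.Printf("%+v\n", update)
		return nil
	},
}

func main() {
	updateCmd.Flags().UintVar(&maxRunners, "max-runners", 5, "Maximum runners.")
	updateCmd.Flags().BoolVar(&enabled, "enabled", false, "Enable the scale set.")
	updateCmd.SetArgs([]string{"--max-runners", "10"})
	_ = updateCmd.Execute()
}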
scalesetListCmd.Flags().StringVarP(&scalesetRepository, "repo", "r", "", "List all scale sets within this repository.") - scalesetListCmd.Flags().StringVarP(&scalesetOrganization, "org", "o", "", "List all scale sets within this organization.") - scalesetListCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "List all scale sets within this enterprise.") - scalesetListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise") - scalesetListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") - - scaleSetUpdateCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetFlavor, "flavor", "", "The flavor to use for the runners in this scale set.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetName, "name", "", "The name of the scale set. This option is mandatory.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetOSType, "os-type", "linux", "Operating system type (windows, linux, etc).") - scaleSetUpdateCmd.Flags().StringVar(&scalesetOSArch, "os-arch", "amd64", "Operating system architecture (amd64, arm, etc).") - scaleSetUpdateCmd.Flags().StringVar(&scalesetRunnerPrefix, "runner-prefix", "", "The name prefix to use for runners in this scale set.") - scaleSetUpdateCmd.Flags().UintVar(&scalesetMaxRunners, "max-runners", 5, "The maximum number of runner this scale set will create.") - scaleSetUpdateCmd.Flags().UintVar(&scalesetMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetGitHubRunnerGroup, "runner-group", "", "The GitHub runner group in which all runners of this scale set will be added.") - scaleSetUpdateCmd.Flags().BoolVar(&scalesetEnabled, "enabled", false, "Enable this scale set.") - scaleSetUpdateCmd.Flags().UintVar(&scalesetRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join Github.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetExtraSpecsFile, "extra-specs-file", "", "A file containing a valid json which will be passed to the IaaS provider managing the scale set.") - scaleSetUpdateCmd.Flags().StringVar(&scalesetExtraSpecs, "extra-specs", "", "A valid json which will be passed to the IaaS provider managing the scale set.") - scaleSetUpdateCmd.MarkFlagsMutuallyExclusive("extra-specs-file", "extra-specs") - - scaleSetAddCmd.Flags().StringVar(&scalesetProvider, "provider-name", "", "The name of the provider where runners will be created.") - scaleSetAddCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.") - scaleSetAddCmd.Flags().StringVar(&scalesetName, "name", "", "The name of the scale set. 
This option is mandatory.") - scaleSetAddCmd.Flags().StringVar(&scalesetFlavor, "flavor", "", "The flavor to use for this runner.") - scaleSetAddCmd.Flags().StringVar(&scalesetRunnerPrefix, "runner-prefix", "", "The name prefix to use for runners in this scale set.") - scaleSetAddCmd.Flags().StringVar(&scalesetOSType, "os-type", "linux", "Operating system type (windows, linux, etc).") - scaleSetAddCmd.Flags().StringVar(&scalesetOSArch, "os-arch", "amd64", "Operating system architecture (amd64, arm, etc).") - scaleSetAddCmd.Flags().StringVar(&scalesetExtraSpecsFile, "extra-specs-file", "", "A file containing a valid json which will be passed to the IaaS provider managing the scale set.") - scaleSetAddCmd.Flags().StringVar(&scalesetExtraSpecs, "extra-specs", "", "A valid json which will be passed to the IaaS provider managing the scale set.") - scaleSetAddCmd.Flags().StringVar(&scalesetGitHubRunnerGroup, "runner-group", "", "The GitHub runner group in which all runners of this scale set will be added.") - scaleSetAddCmd.Flags().UintVar(&scalesetMaxRunners, "max-runners", 5, "The maximum number of runner this scale set will create.") - scaleSetAddCmd.Flags().UintVar(&scalesetRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join Github.") - scaleSetAddCmd.Flags().UintVar(&scalesetMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.") - scaleSetAddCmd.Flags().BoolVar(&scalesetEnabled, "enabled", false, "Enable this scale set.") - scaleSetAddCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") - scaleSetAddCmd.MarkFlagRequired("provider-name") //nolint - scaleSetAddCmd.MarkFlagRequired("name") //nolint - scaleSetAddCmd.MarkFlagRequired("image") //nolint - scaleSetAddCmd.MarkFlagRequired("flavor") //nolint - - scaleSetAddCmd.Flags().StringVarP(&scalesetRepository, "repo", "r", "", "Add the new scale set within this repository.") - scaleSetAddCmd.Flags().StringVarP(&scalesetOrganization, "org", "o", "", "Add the new scale set within this organization.") - scaleSetAddCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "Add the new scale set within this enterprise.") - scaleSetAddCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise") - scaleSetAddCmd.MarkFlagsMutuallyExclusive("extra-specs-file", "extra-specs") - - scalesetCmd.AddCommand( - scalesetListCmd, - scaleSetShowCmd, - scaleSetDeleteCmd, - scaleSetUpdateCmd, - scaleSetAddCmd, - ) - - rootCmd.AddCommand(scalesetCmd) -} - -func formatScaleSets(scaleSets []params.ScaleSet) { - if outputFormat == common.OutputFormatJSON { - printAsJSON(scaleSets) - return - } - t := table.NewWriter() - header := table.Row{"ID", "Scale Set Name", "Image", "Flavor", "Belongs to", "Level", "Runner Group", "Enabled", "Runner Prefix", "Provider"} - t.AppendHeader(header) - - for _, scaleSet := range scaleSets { - var belongsTo string - var level string - - switch { - case scaleSet.RepoID != "" && scaleSet.RepoName != "": - belongsTo = scaleSet.RepoName - level = entityTypeRepo - case scaleSet.OrgID != "" && scaleSet.OrgName != "": - belongsTo = scaleSet.OrgName - level = entityTypeOrg - case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "": - belongsTo = scaleSet.EnterpriseName - level = entityTypeEnterprise - } - t.AppendRow(table.Row{scaleSet.ID, scaleSet.Name, scaleSet.Image, 
scaleSet.Flavor, belongsTo, level, scaleSet.GitHubRunnerGroup, scaleSet.Enabled, scaleSet.GetRunnerPrefix(), scaleSet.ProviderName}) - t.AppendSeparator() - } - fmt.Println(t.Render()) -} - -func formatOneScaleSet(scaleSet params.ScaleSet) { - if outputFormat == common.OutputFormatJSON { - printAsJSON(scaleSet) - return - } - t := table.NewWriter() - rowConfigAutoMerge := table.RowConfig{AutoMerge: true} - - header := table.Row{"Field", "Value"} - - var belongsTo string - var level string - - switch { - case scaleSet.RepoID != "" && scaleSet.RepoName != "": - belongsTo = scaleSet.RepoName - level = entityTypeRepo - case scaleSet.OrgID != "" && scaleSet.OrgName != "": - belongsTo = scaleSet.OrgName - level = entityTypeOrg - case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "": - belongsTo = scaleSet.EnterpriseName - level = entityTypeEnterprise - } - - t.AppendHeader(header) - t.AppendRow(table.Row{"ID", scaleSet.ID}) - t.AppendRow(table.Row{"Scale Set ID", scaleSet.ScaleSetID}) - t.AppendRow(table.Row{"Scale Name", scaleSet.Name}) - t.AppendRow(table.Row{"Provider Name", scaleSet.ProviderName}) - t.AppendRow(table.Row{"Image", scaleSet.Image}) - t.AppendRow(table.Row{"Flavor", scaleSet.Flavor}) - t.AppendRow(table.Row{"OS Type", scaleSet.OSType}) - t.AppendRow(table.Row{"OS Architecture", scaleSet.OSArch}) - t.AppendRow(table.Row{"Max Runners", scaleSet.MaxRunners}) - t.AppendRow(table.Row{"Min Idle Runners", scaleSet.MinIdleRunners}) - t.AppendRow(table.Row{"Runner Bootstrap Timeout", scaleSet.RunnerBootstrapTimeout}) - t.AppendRow(table.Row{"Belongs to", belongsTo}) - t.AppendRow(table.Row{"Level", level}) - t.AppendRow(table.Row{"Enabled", scaleSet.Enabled}) - t.AppendRow(table.Row{"Runner Prefix", scaleSet.GetRunnerPrefix()}) - t.AppendRow(table.Row{"Extra specs", string(scaleSet.ExtraSpecs)}) - t.AppendRow(table.Row{"GitHub Runner Group", scaleSet.GitHubRunnerGroup}) - - if len(scaleSet.Instances) > 0 { - for _, instance := range scaleSet.Instances { - t.AppendRow(table.Row{"Instances", fmt.Sprintf("%s (%s)", instance.Name, instance.ID)}, rowConfigAutoMerge) - } - } - - t.SetColumnConfigs([]table.ColumnConfig{ - {Number: 1, AutoMerge: true}, - {Number: 2, AutoMerge: false, WidthMax: 100}, - }) - fmt.Println(t.Render()) -} diff --git a/cmd/garm-cli/cmd/util.go b/cmd/garm-cli/cmd/util.go deleted file mode 100644 index 26f57abb..00000000 --- a/cmd/garm-cli/cmd/util.go +++ /dev/null @@ -1,108 +0,0 @@ -package cmd - -import ( - "fmt" - "strings" - - "github.com/google/uuid" - - apiClientEnterprises "github.com/cloudbase/garm/client/enterprises" - apiClientOrgs "github.com/cloudbase/garm/client/organizations" - apiClientRepos "github.com/cloudbase/garm/client/repositories" -) - -func resolveRepository(nameOrID, endpoint string) (string, error) { - if nameOrID == "" { - return "", fmt.Errorf("missing repository name or ID") - } - entityID, err := uuid.Parse(nameOrID) - if err == nil { - return entityID.String(), nil - } - - parts := strings.SplitN(nameOrID, "/", 2) - if len(parts) < 2 { - // format of friendly name is invalid for a repository. - // Return the string as is. 
- return nameOrID, nil - } - - listReposReq := apiClientRepos.NewListReposParams() - listReposReq.Owner = &parts[0] - listReposReq.Name = &parts[1] - if endpoint != "" { - listReposReq.Endpoint = &endpoint - } - response, err := apiCli.Repositories.ListRepos(listReposReq, authToken) - if err != nil { - return "", err - } - if len(response.Payload) == 0 { - return "", fmt.Errorf("repository %s was not found", nameOrID) - } - - if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple repositories with the name %s exist, please use the repository ID or specify the --endpoint parameter", nameOrID) - } - return response.Payload[0].ID, nil -} - -func resolveOrganization(nameOrID, endpoint string) (string, error) { - if nameOrID == "" { - return "", fmt.Errorf("missing organization name or ID") - } - entityID, err := uuid.Parse(nameOrID) - if err == nil { - return entityID.String(), nil - } - - listOrgsReq := apiClientOrgs.NewListOrgsParams() - listOrgsReq.Name = &nameOrID - if endpoint != "" { - listOrgsReq.Endpoint = &endpoint - } - response, err := apiCli.Organizations.ListOrgs(listOrgsReq, authToken) - if err != nil { - return "", err - } - - if len(response.Payload) == 0 { - return "", fmt.Errorf("organization %s was not found", nameOrID) - } - - if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple organizations with the name %s exist, please use the organization ID or specify the --endpoint parameter", nameOrID) - } - - return response.Payload[0].ID, nil -} - -func resolveEnterprise(nameOrID, endpoint string) (string, error) { - if nameOrID == "" { - return "", fmt.Errorf("missing enterprise name or ID") - } - entityID, err := uuid.Parse(nameOrID) - if err == nil { - return entityID.String(), nil - } - - listEnterprisesReq := apiClientEnterprises.NewListEnterprisesParams() - listEnterprisesReq.Name = &enterpriseName - if endpoint != "" { - listEnterprisesReq.Endpoint = &endpoint - } - response, err := apiCli.Enterprises.ListEnterprises(listEnterprisesReq, authToken) - if err != nil { - return "", err - } - - if len(response.Payload) == 0 { - return "", fmt.Errorf("enterprise %s was not found", nameOrID) - } - - if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple enterprises with the name %s exist, please use the enterprise ID or specify the --endpoint parameter", nameOrID) - } - - return response.Payload[0].ID, nil -} diff --git a/cmd/garm-cli/common/cobra.go b/cmd/garm-cli/common/cobra.go index 399a4b92..e59a2aca 100644 --- a/cmd/garm-cli/common/cobra.go +++ b/cmd/garm-cli/common/cobra.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
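The util.go helpers deleted above (resolveRepository, resolveOrganization, resolveEnterprise) all share one shape: try uuid.Parse first and pass a valid UUID through untouched; otherwise treat the argument as a friendly name and look it up via the API, disambiguating with --endpoint when several entities share a name. A condensed sketch of that shape; lookupRepoID is a hypothetical stand-in for the generated ListRepos client call, not part of GARM:

package main

import (
	"fmt"
	"strings"

	"github.com/google/uuid"
)

// lookupRepoID stands in for the generated API lookup; hypothetical helper.
func lookupRepoID(owner, name, endpoint string) (string, error) {
	return "", fmt.Errorf("repository %s/%s was not found", owner, name)
}

// resolve mirrors the removed helpers: a valid UUID passes through unchanged,
// anything else is treated as a friendly name.
func resolve(nameOrID, endpoint string) (string, error) {
	if id, err := uuid.Parse(nameOrID); err == nil {
		return id.String(), nil
	}
	parts := strings.SplitN(nameOrID, "/", 2)
	if len(parts) < 2 {
		// Not in owner/name form; return as-is and let the API decide.
		return nameOrID, nil
	}
	return lookupRepoID(parts[0], parts[1], endpoint)
}

func main() {
	id, err := resolve("cloudbase/garm", "")
	fmt.Println(id, err)
}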
package common import "fmt" diff --git a/cmd/garm-cli/common/common.go b/cmd/garm-cli/common/common.go index 1f607cb4..08189d21 100644 --- a/cmd/garm-cli/common/common.go +++ b/cmd/garm-cli/common/common.go @@ -15,14 +15,8 @@ package common import ( - "encoding/json" "errors" "fmt" - "os" - "runtime" - "sort" - "strings" - "time" "github.com/manifoldco/promptui" "github.com/nbutton23/zxcvbn-go" @@ -80,246 +74,3 @@ func PrintWebsocketMessage(_ int, msg []byte) error { fmt.Println(util.SanitizeLogEntry(string(msg))) return nil } - -type LogFormatter struct { - MinLevel string - AttributeFilters map[string]string - EnableColor bool -} - -type LogRecord struct { - Time string `json:"time"` - Level string `json:"level"` - Msg string `json:"msg"` - Attrs map[string]interface{} `json:",inline"` -} - -// Color codes for different log levels -const ( - ColorReset = "\033[0m" - ColorRed = "\033[31m" - ColorYellow = "\033[33m" - ColorBlue = "\033[34m" - ColorMagenta = "\033[35m" - ColorCyan = "\033[36m" - ColorWhite = "\033[37m" - ColorGray = "\033[90m" -) - -func (lf *LogFormatter) colorizeLevel(level string) string { - if !lf.EnableColor { - return level - } - - levelUpper := strings.TrimSpace(strings.ToUpper(level)) - switch levelUpper { - case "ERROR": - return ColorRed + level + ColorReset - case "WARN", "WARNING": - return ColorYellow + level + ColorReset - case "INFO": - return ColorBlue + level + ColorReset - case "DEBUG": - return ColorMagenta + level + ColorReset - default: - return level - } -} - -func (lf *LogFormatter) shouldFilterLevel(level string) bool { - if lf.MinLevel == "" { - return false - } - - levelMap := map[string]int{ - "DEBUG": 0, - "INFO": 1, - "WARN": 2, - "ERROR": 3, - } - - minLevelNum, exists := levelMap[strings.ToUpper(lf.MinLevel)] - if !exists { - return false - } - - currentLevelNum, exists := levelMap[strings.ToUpper(level)] - if !exists { - return false - } - - return currentLevelNum < minLevelNum -} - -func (lf *LogFormatter) matchesAttributeFilters(attrs map[string]interface{}, msg string) bool { - if len(lf.AttributeFilters) == 0 { - return true - } - - for key, expectedValue := range lf.AttributeFilters { - // Special handling for message filtering - if key == "msg" { - if strings.Contains(msg, expectedValue) { - return true - } - } - - // Regular attribute filtering - actualValue, exists := attrs[key] - if exists { - actualStr := fmt.Sprintf("%v", actualValue) - if actualStr == expectedValue { - return true - } - } - } - - return false -} - -func (lf *LogFormatter) FormatWebsocketMessage(_ int, msg []byte) error { - // Try to parse as JSON log record - var logRecord LogRecord - err := json.Unmarshal(msg, &logRecord) - if err != nil { - // If it's not JSON, print as-is (sanitized) - _, err = fmt.Println(util.SanitizeLogEntry(string(msg))) - return err - } - - // Apply level filtering - if lf.shouldFilterLevel(logRecord.Level) { - return nil - } - - // Parse additional attributes from the JSON - var fullRecord map[string]interface{} - if err := json.Unmarshal(msg, &fullRecord); err == nil { - // Remove standard fields and keep only attributes - delete(fullRecord, "time") - delete(fullRecord, "level") - delete(fullRecord, "msg") - logRecord.Attrs = fullRecord - } - - // Apply attribute filtering - if !lf.matchesAttributeFilters(logRecord.Attrs, logRecord.Msg) { - return nil - } - - // Format timestamp to fixed width - timeStr := logRecord.Time - if t, err := time.Parse(time.RFC3339Nano, logRecord.Time); err == nil { - timeStr = t.Format("2006-01-02 
15:04:05.000") - } - - // Format log level to fixed width (5 characters) - levelStr := lf.colorizeLevel(fmt.Sprintf("%-5s", strings.ToUpper(logRecord.Level))) - - // Highlight message if it matches a msg filter - msgStr := logRecord.Msg - if msgFilter, hasMsgFilter := lf.AttributeFilters["msg"]; hasMsgFilter { - if strings.Contains(msgStr, msgFilter) && lf.EnableColor { - msgStr = ColorYellow + msgStr + ColorReset - } - } - - output := fmt.Sprintf("%s [%s] %s", timeStr, levelStr, msgStr) - - // Add attributes if any - if len(logRecord.Attrs) > 0 { - // Get sorted keys for consistent output - var keys []string - for k := range logRecord.Attrs { - keys = append(keys, k) - } - sort.Strings(keys) - - var attrPairs []string - for _, k := range keys { - v := logRecord.Attrs[k] - attrStr := fmt.Sprintf("%s=%v", k, v) - - // Highlight filtered attributes - if filterValue, isFiltered := lf.AttributeFilters[k]; isFiltered && fmt.Sprintf("%v", v) == filterValue { - if lf.EnableColor { - attrStr = ColorYellow + attrStr + ColorGray - } - } else if lf.EnableColor { - attrStr = ColorGray + attrStr - } - - attrPairs = append(attrPairs, attrStr) - } - if len(attrPairs) > 0 { - if lf.EnableColor { - output += " " + strings.Join(attrPairs, " ") + ColorReset - } else { - output += " " + strings.Join(attrPairs, " ") - } - } - } - - fmt.Println(output) - return nil -} - -// supportsColor checks if the current terminal/environment supports ANSI colors. -// This is best effort. There is no reliable way to determine if a terminal supports -// color. Set NO_COLOR=1 to disable color if your terminal doesn't support it, but this -// function returns true. -func supportsColor() bool { - // Check NO_COLOR environment variable (universal standard) - if os.Getenv("NO_COLOR") != "" { - return false - } - - // Check FORCE_COLOR environment variable - if os.Getenv("FORCE_COLOR") != "" { - return true - } - - // On Windows, check for modern terminal support - if runtime.GOOS == "windows" { - // Check for Windows Terminal - if os.Getenv("WT_SESSION") != "" { - return true - } - // Check for ConEmu - if os.Getenv("ConEmuANSI") == "ON" { - return true - } - // Check for other modern terminals - term := os.Getenv("TERM") - if strings.Contains(term, "color") || term == "xterm-256color" || term == "screen-256color" { - return true - } - // Modern PowerShell and cmd.exe with VT processing - if os.Getenv("TERM_PROGRAM") != "" { - return true - } - // Default to false for older Windows cmd.exe - return false - } - - // On Unix-like systems, check TERM - term := os.Getenv("TERM") - if term == "" || term == "dumb" { - return false - } - - return true -} - -func NewLogFormatter(minLevel string, attributeFilters map[string]string, color bool) *LogFormatter { - var enableColor bool - if color && supportsColor() { - enableColor = true - } - - return &LogFormatter{ - MinLevel: minLevel, - AttributeFilters: attributeFilters, - EnableColor: enableColor, - } -} diff --git a/cmd/garm-cli/config/config.go b/cmd/garm-cli/config/config.go index cf1cf1d2..6f6b197c 100644 --- a/cmd/garm-cli/config/config.go +++ b/cmd/garm-cli/config/config.go @@ -15,13 +15,13 @@ package config import ( - "errors" "fmt" "os" "path/filepath" "sync" "github.com/BurntSushi/toml" + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" ) @@ -34,11 +34,11 @@ const ( func getConfigFilePath() (string, error) { configDir, err := getHomeDir() if err != nil { - return "", fmt.Errorf("error fetching home folder: %w", err) + return "", 
errors.Wrap(err, "fetching home folder") } if err := ensureHomeDir(configDir); err != nil { - return "", fmt.Errorf("error ensuring config dir: %w", err) + return "", errors.Wrap(err, "ensuring config dir") } cfgFile := filepath.Join(configDir, DefaultConfigFileName) @@ -48,7 +48,7 @@ func getConfigFilePath() (string, error) { func LoadConfig() (*Config, error) { cfgFile, err := getConfigFilePath() if err != nil { - return nil, fmt.Errorf("error fetching config: %w", err) + return nil, errors.Wrap(err, "fetching config") } if _, err := os.Stat(cfgFile); err != nil { @@ -56,12 +56,12 @@ func LoadConfig() (*Config, error) { // return empty config return &Config{}, nil } - return nil, fmt.Errorf("error accessing config file: %w", err) + return nil, errors.Wrap(err, "accessing config file") } var config Config if _, err := toml.DecodeFile(cfgFile, &config); err != nil { - return nil, fmt.Errorf("error decoding toml: %w", err) + return nil, errors.Wrap(err, "decoding toml") } return &config, nil @@ -157,17 +157,17 @@ func (c *Config) SaveConfig() error { cfgFile, err := getConfigFilePath() if err != nil { if !errors.Is(err, os.ErrNotExist) { - return fmt.Errorf("error getting config: %w", err) + return errors.Wrap(err, "getting config") } } cfgHandle, err := os.Create(cfgFile) if err != nil { - return fmt.Errorf("error getting file handle: %w", err) + return errors.Wrap(err, "getting file handle") } encoder := toml.NewEncoder(cfgHandle) if err := encoder.Encode(c); err != nil { - return fmt.Errorf("error saving config: %w", err) + return errors.Wrap(err, "saving config") } return nil diff --git a/cmd/garm-cli/config/home.go b/cmd/garm-cli/config/home.go index 11821e9c..b6043289 100644 --- a/cmd/garm-cli/config/home.go +++ b/cmd/garm-cli/config/home.go @@ -15,19 +15,19 @@ package config import ( - "errors" - "fmt" "os" + + "github.com/pkg/errors" ) func ensureHomeDir(folder string) error { if _, err := os.Stat(folder); err != nil { if !errors.Is(err, os.ErrNotExist) { - return fmt.Errorf("error checking home dir: %w", err) + return errors.Wrap(err, "checking home dir") } if err := os.MkdirAll(folder, 0o710); err != nil { - return fmt.Errorf("error creating %s: %w", folder, err) + return errors.Wrapf(err, "creating %s", folder) } } diff --git a/cmd/garm-cli/config/home_nix.go b/cmd/garm-cli/config/home_nix.go index 323f29d7..27aed4f8 100644 --- a/cmd/garm-cli/config/home_nix.go +++ b/cmd/garm-cli/config/home_nix.go @@ -1,31 +1,19 @@ //go:build !windows // +build !windows -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
package config import ( - "fmt" "os" "path/filepath" + + "github.com/pkg/errors" ) func getHomeDir() (string, error) { home, err := os.UserHomeDir() if err != nil { - return "", fmt.Errorf("error fetching home dir: %w", err) + return "", errors.Wrap(err, "fetching home dir") } return filepath.Join(home, ".local", "share", DefaultAppFolder), nil diff --git a/cmd/garm-cli/config/home_windows.go b/cmd/garm-cli/config/home_windows.go index c70fb645..d34379b4 100644 --- a/cmd/garm-cli/config/home_windows.go +++ b/cmd/garm-cli/config/home_windows.go @@ -1,19 +1,6 @@ //go:build windows && !linux // +build windows,!linux -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package config import ( diff --git a/cmd/garm/main.go b/cmd/garm/main.go index cba3a064..45f8fe82 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -18,18 +18,19 @@ import ( "context" "flag" "fmt" + "io" "log" "log/slog" "net" "net/http" "os" "os/signal" - "runtime" "syscall" "time" "github.com/gorilla/handlers" "github.com/gorilla/mux" + "github.com/pkg/errors" lumberjack "gopkg.in/natefinch/lumberjack.v2" "github.com/cloudbase/garm-provider-common/util" @@ -40,18 +41,13 @@ import ( "github.com/cloudbase/garm/database" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/metrics" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck runnerMetrics "github.com/cloudbase/garm/runner/metrics" - "github.com/cloudbase/garm/runner/providers" garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" - "github.com/cloudbase/garm/workers/cache" - "github.com/cloudbase/garm/workers/entity" - "github.com/cloudbase/garm/workers/provider" ) var ( @@ -64,17 +60,16 @@ var signals = []os.Signal{ syscall.SIGTERM, } -func maybeInitController(db common.Store) (params.ControllerInfo, error) { - if info, err := db.ControllerInfo(); err == nil { - return info, nil +func maybeInitController(db common.Store) error { + if _, err := db.ControllerInfo(); err == nil { + return nil } - info, err := db.InitController() - if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error initializing controller: %w", err) + if _, err := db.InitController(); err != nil { + return errors.Wrap(err, "initializing controller") } - return info, nil + return nil } func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub) { @@ -103,6 +98,16 @@ func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub } }() + writers := []io.Writer{ + logWriter, + } + + if hub != nil { + writers = append(writers, hub) + } + + wr := io.MultiWriter(writers...) 
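The added lines above build one io.MultiWriter over the log file and, when a hub is configured, the websocket hub, so a single slog handler serves both sinks in the same format. A minimal sketch of that wiring, standard library only; the bytes.Buffer stands in for the hub, which only needs to be an io.Writer:

package main

import (
	"bytes"
	"io"
	"log/slog"
	"os"
)

func main() {
	var hub bytes.Buffer // stand-in for the websocket hub

	writers := []io.Writer{os.Stdout}
	writers = append(writers, &hub)

	// One handler, one format, fanned out to every sink.
	wr := io.MultiWriter(writers...)
	logger := slog.New(slog.NewTextHandler(wr, &slog.HandlerOptions{Level: slog.LevelInfo}))

	logger.Info("server starting", "component", "sketch")
	_ = hub.String() // the hub received the same line as stdout
}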
+ var logLevel slog.Level switch logCfg.LogLevel { case config.LevelDebug: @@ -123,25 +128,16 @@ func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub Level: logLevel, } - var fileHan slog.Handler + var han slog.Handler switch logCfg.LogFormat { case config.FormatJSON: - fileHan = slog.NewJSONHandler(logWriter, &opts) + han = slog.NewJSONHandler(wr, &opts) default: - fileHan = slog.NewTextHandler(logWriter, &opts) + han = slog.NewTextHandler(wr, &opts) } - handlers := []slog.Handler{ - fileHan, - } - - if hub != nil { - wsHan := slog.NewJSONHandler(hub, &opts) - handlers = append(handlers, wsHan) - } - - wrapped := &garmUtil.SlogMultiHandler{ - Handlers: handlers, + wrapped := garmUtil.ContextHandler{ + Handler: han, } slog.SetDefault(slog.New(wrapped)) } @@ -149,7 +145,7 @@ func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { info, err := store.ControllerInfo() if err != nil { - return fmt.Errorf("error fetching controller info: %w", err) + return errors.Wrap(err, "fetching controller info") } var updateParams params.UpdateControllerParams @@ -173,12 +169,11 @@ func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { _, err = store.UpdateController(updateParams) if err != nil { - return fmt.Errorf("error updating controller info: %w", err) + return errors.Wrap(err, "updating controller info") } return nil } -//gocyclo:ignore func main() { flag.Parse() if *version { @@ -215,60 +210,14 @@ func main() { log.Fatal(err) } - controllerInfo, err := maybeInitController(db) - if err != nil { + if err := maybeInitController(db); err != nil { log.Fatal(err) } - // Local locker for now. Will be configurable in the future, - // as we add scale-out capability to GARM. 
- lock, err := locking.NewLocalLocker(ctx, db) - if err != nil { - log.Fatalf("failed to create locker: %q", err) - } - - if err := locking.RegisterLocker(lock); err != nil { - log.Fatalf("failed to register locker: %q", err) - } - if err := maybeUpdateURLsFromConfig(*cfg, db); err != nil { log.Fatal(err) } - cacheWorker := cache.NewWorker(ctx, db) - if err != nil { - log.Fatalf("failed to create cache worker: %+v", err) - } - if err := cacheWorker.Start(); err != nil { - log.Fatalf("failed to start cache worker: %+v", err) - } - - providers, err := providers.LoadProvidersFromConfig(ctx, *cfg, controllerInfo.ControllerID.String()) - if err != nil { - log.Fatalf("loading providers: %+v", err) - } - - entityController, err := entity.NewController(ctx, db, providers) - if err != nil { - log.Fatalf("failed to create entity controller: %+v", err) - } - if err := entityController.Start(); err != nil { - log.Fatalf("failed to start entity controller: %+v", err) - } - - instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) - if err != nil { - log.Fatalf("failed to create instance token getter: %+v", err) - } - - providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) - if err != nil { - log.Fatalf("failed to create provider worker: %+v", err) - } - if err := providerWorker.Start(); err != nil { - log.Fatalf("failed to start provider worker: %+v", err) - } - runner, err := runner.NewRunner(ctx, *cfg, db) if err != nil { log.Fatalf("failed to create controller: %+v", err) @@ -280,7 +229,7 @@ func main() { } authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) - controller, err := controllers.NewAPIController(runner, authenticator, hub, cfg.APIServer) + controller, err := controllers.NewAPIController(runner, authenticator, hub) if err != nil { log.Fatalf("failed to create controller: %+v", err) } @@ -312,9 +261,6 @@ func main() { router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) - // Add WebUI routes - router = routers.WithWebUI(router, cfg.APIServer) - // start the metrics collector if cfg.Metrics.Enable { slog.InfoContext(ctx, "setting up metric routes") @@ -330,8 +276,6 @@ func main() { } if cfg.Default.DebugServer { - runtime.SetBlockProfileRate(1) - runtime.SetMutexProfileFraction(1) slog.InfoContext(ctx, "setting up debug routes") router = routers.WithDebugServer(router) } @@ -370,27 +314,12 @@ func main() { <-ctx.Done() - slog.InfoContext(ctx, "shutting down http server") shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "graceful api server shutdown failed") } - if err := cacheWorker.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop credentials worker") - } - - slog.InfoContext(ctx, "shutting down entity controller") - if err := entityController.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") - } - - slog.InfoContext(ctx, "shutting down provider worker") - if err := providerWorker.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") - } - slog.With(slog.Any("error", err)).InfoContext(ctx, "waiting for runner to stop") if err := runner.Wait(); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to 
shutdown workers") diff --git a/config/config.go b/config/config.go index 31a16ae2..57ec0e80 100644 --- a/config/config.go +++ b/config/config.go @@ -31,6 +31,7 @@ import ( "github.com/BurntSushi/toml" "github.com/bradleyfalzon/ghinstallation/v2" zxcvbn "github.com/nbutton23/zxcvbn-go" + "github.com/pkg/errors" "golang.org/x/oauth2" "github.com/cloudbase/garm/params" @@ -83,10 +84,10 @@ const ( func NewConfig(cfgFile string) (*Config, error) { var config Config if _, err := toml.DecodeFile(cfgFile, &config); err != nil { - return nil, fmt.Errorf("error decoding toml: %w", err) + return nil, errors.Wrap(err, "decoding toml") } if err := config.Validate(); err != nil { - return nil, fmt.Errorf("error validating config: %w", err) + return nil, errors.Wrap(err, "validating config") } return &config, nil } @@ -495,19 +496,19 @@ type Database struct { // GormParams returns the database type and connection URI func (d *Database) GormParams() (dbType DBBackendType, uri string, err error) { if err := d.Validate(); err != nil { - return "", "", fmt.Errorf("error validating database config: %w", err) + return "", "", errors.Wrap(err, "validating database config") } dbType = d.DbBackend switch dbType { case MySQLBackend: uri, err = d.MySQL.ConnectionString() if err != nil { - return "", "", fmt.Errorf("error fetching mysql connection string: %w", err) + return "", "", errors.Wrap(err, "fetching mysql connection string") } case SQLiteBackend: uri, err = d.SQLite.ConnectionString() if err != nil { - return "", "", fmt.Errorf("error fetching sqlite3 connection string: %w", err) + return "", "", errors.Wrap(err, "fetching sqlite3 connection string") } default: return "", "", fmt.Errorf("invalid database backend: %s", dbType) @@ -662,21 +663,6 @@ func (m *Metrics) Duration() time.Duration { return duration } -// WebUI holds configuration for the web UI -type WebUI struct { - EnableWebUI bool `toml:"enable" json:"enable"` -} - -// Validate validates the WebUI config -func (w *WebUI) Validate() error { - return nil -} - -// GetWebappPath returns the webapp path with proper formatting -func (w *WebUI) GetWebappPath() string { - return "/ui/" -} - // APIServer holds configuration for the API server // worker type APIServer struct { @@ -685,7 +671,6 @@ type APIServer struct { UseTLS bool `toml:"use_tls" json:"use-tls"` TLSConfig TLSConfig `toml:"tls" json:"tls"` CORSOrigins []string `toml:"cors_origins" json:"cors-origins"` - WebUI WebUI `toml:"webui" json:"webui"` } // BindAddress returns a host:port string. @@ -711,11 +696,6 @@ func (a *APIServer) Validate() error { // when we try to bind to it. return fmt.Errorf("invalid IP address") } - - if err := a.WebUI.Validate(); err != nil { - return fmt.Errorf("invalid webui config: %w", err) - } - return nil } diff --git a/config/config_test.go b/config/config_test.go index bbf9e299..52c2928e 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -517,6 +517,7 @@ func TestJWTAuthConfig(t *testing.T) { func TestTimeToLiveDuration(t *testing.T) { cfg := JWTAuth{ + Secret: EncryptionPassphrase, TimeToLive: "48h", } diff --git a/database/common/errors.go b/database/common/errors.go index 5e6a5087..df2d936a 100644 --- a/database/common/errors.go +++ b/database/common/errors.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package common import "fmt" diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index 024a1271..841982b4 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -14,68 +14,9 @@ type Store struct { mock.Mock } -type Store_Expecter struct { - mock *mock.Mock -} - -func (_m *Store) EXPECT() *Store_Expecter { - return &Store_Expecter{mock: &_m.Mock} -} - -// AddEntityEvent provides a mock function with given fields: ctx, entity, event, eventLevel, statusMessage, maxEvents -func (_m *Store) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - ret := _m.Called(ctx, entity, event, eventLevel, statusMessage, maxEvents) - - if len(ret) == 0 { - panic("no return value specified for AddEntityEvent") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.EventType, params.EventLevel, string, int) error); ok { - r0 = rf(ctx, entity, event, eventLevel, statusMessage, maxEvents) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_AddEntityEvent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddEntityEvent' -type Store_AddEntityEvent_Call struct { - *mock.Call -} - -// AddEntityEvent is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -// - event params.EventType -// - eventLevel params.EventLevel -// - statusMessage string -// - maxEvents int -func (_e *Store_Expecter) AddEntityEvent(ctx interface{}, entity interface{}, event interface{}, eventLevel interface{}, statusMessage interface{}, maxEvents interface{}) *Store_AddEntityEvent_Call { - return &Store_AddEntityEvent_Call{Call: _e.mock.On("AddEntityEvent", ctx, entity, event, eventLevel, statusMessage, maxEvents)} -} - -func (_c *Store_AddEntityEvent_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int)) *Store_AddEntityEvent_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.EventType), args[3].(params.EventLevel), args[4].(string), args[5].(int)) - }) - return _c -} - -func (_c *Store_AddEntityEvent_Call) Return(_a0 error) *Store_AddEntityEvent_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_AddEntityEvent_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.EventType, params.EventLevel, string, int) error) *Store_AddEntityEvent_Call { - _c.Call.Return(run) - return _c -} - -// AddInstanceEvent provides a mock function with given fields: ctx, instanceNameOrID, event, eventLevel, eventMessage -func (_m *Store) AddInstanceEvent(ctx context.Context, instanceNameOrID string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error { - ret := _m.Called(ctx, 
instanceNameOrID, event, eventLevel, eventMessage) +// AddInstanceEvent provides a mock function with given fields: ctx, instanceName, event, eventLevel, eventMessage +func (_m *Store) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error { + ret := _m.Called(ctx, instanceName, event, eventLevel, eventMessage) if len(ret) == 0 { panic("no return value specified for AddInstanceEvent") @@ -83,7 +24,7 @@ func (_m *Store) AddInstanceEvent(ctx context.Context, instanceNameOrID string, var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, params.EventType, params.EventLevel, string) error); ok { - r0 = rf(ctx, instanceNameOrID, event, eventLevel, eventMessage) + r0 = rf(ctx, instanceName, event, eventLevel, eventMessage) } else { r0 = ret.Error(0) } @@ -91,38 +32,6 @@ func (_m *Store) AddInstanceEvent(ctx context.Context, instanceNameOrID string, return r0 } -// Store_AddInstanceEvent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddInstanceEvent' -type Store_AddInstanceEvent_Call struct { - *mock.Call -} - -// AddInstanceEvent is a helper method to define mock.On call -// - ctx context.Context -// - instanceNameOrID string -// - event params.EventType -// - eventLevel params.EventLevel -// - eventMessage string -func (_e *Store_Expecter) AddInstanceEvent(ctx interface{}, instanceNameOrID interface{}, event interface{}, eventLevel interface{}, eventMessage interface{}) *Store_AddInstanceEvent_Call { - return &Store_AddInstanceEvent_Call{Call: _e.mock.On("AddInstanceEvent", ctx, instanceNameOrID, event, eventLevel, eventMessage)} -} - -func (_c *Store_AddInstanceEvent_Call) Run(run func(ctx context.Context, instanceNameOrID string, event params.EventType, eventLevel params.EventLevel, eventMessage string)) *Store_AddInstanceEvent_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.EventType), args[3].(params.EventLevel), args[4].(string)) - }) - return _c -} - -func (_c *Store_AddInstanceEvent_Call) Return(_a0 error) *Store_AddInstanceEvent_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_AddInstanceEvent_Call) RunAndReturn(run func(context.Context, string, params.EventType, params.EventLevel, string) error) *Store_AddInstanceEvent_Call { - _c.Call.Return(run) - return _c -} - // BreakLockJobIsQueued provides a mock function with given fields: ctx, jobID func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { ret := _m.Called(ctx, jobID) @@ -141,35 +50,6 @@ func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { return r0 } -// Store_BreakLockJobIsQueued_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BreakLockJobIsQueued' -type Store_BreakLockJobIsQueued_Call struct { - *mock.Call -} - -// BreakLockJobIsQueued is a helper method to define mock.On call -// - ctx context.Context -// - jobID int64 -func (_e *Store_Expecter) BreakLockJobIsQueued(ctx interface{}, jobID interface{}) *Store_BreakLockJobIsQueued_Call { - return &Store_BreakLockJobIsQueued_Call{Call: _e.mock.On("BreakLockJobIsQueued", ctx, jobID)} -} - -func (_c *Store_BreakLockJobIsQueued_Call) Run(run func(ctx context.Context, jobID int64)) *Store_BreakLockJobIsQueued_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c 
*Store_BreakLockJobIsQueued_Call) Return(_a0 error) *Store_BreakLockJobIsQueued_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_BreakLockJobIsQueued_Call) RunAndReturn(run func(context.Context, int64) error) *Store_BreakLockJobIsQueued_Call { - _c.Call.Return(run) - return _c -} - // ControllerInfo provides a mock function with no fields func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { ret := _m.Called() @@ -198,35 +78,8 @@ func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { return r0, r1 } -// Store_ControllerInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ControllerInfo' -type Store_ControllerInfo_Call struct { - *mock.Call -} - -// ControllerInfo is a helper method to define mock.On call -func (_e *Store_Expecter) ControllerInfo() *Store_ControllerInfo_Call { - return &Store_ControllerInfo_Call{Call: _e.mock.On("ControllerInfo")} -} - -func (_c *Store_ControllerInfo_Call) Run(run func()) *Store_ControllerInfo_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *Store_ControllerInfo_Call) Return(_a0 params.ControllerInfo, _a1 error) *Store_ControllerInfo_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ControllerInfo_Call) RunAndReturn(run func() (params.ControllerInfo, error)) *Store_ControllerInfo_Call { - _c.Call.Return(run) - return _c -} - // CreateEnterprise provides a mock function with given fields: ctx, name, credentialsName, webhookSecret, poolBalancerType -func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) { +func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) { ret := _m.Called(ctx, name, credentialsName, webhookSecret, poolBalancerType) if len(ret) == 0 { @@ -235,16 +88,16 @@ func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsN var r0 params.Enterprise var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Enterprise, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) (params.Enterprise, error)); ok { return rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } - if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Enterprise); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) params.Enterprise); ok { r0 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Enterprise) } - if rf, ok := ret.Get(1).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, params.PoolBalancerType) error); ok { r1 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) @@ -253,40 +106,8 @@ func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsN return r0, r1 } -// Store_CreateEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEnterprise' -type Store_CreateEnterprise_Call struct { - *mock.Call -} - 
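The regenerated mock above drops mockery's expecter wrappers: the typed Store_*_Call structs and the EXPECT() accessor disappear, leaving testify's classic string-based API. The difference at a call site, sketched with a toy mock rather than GARM's Store:

package store_test

import (
	"testing"

	"github.com/stretchr/testify/mock"
)

// StoreMock is hand-written in the shape mockery generates without the
// with-expecter option.
type StoreMock struct{ mock.Mock }

func (m *StoreMock) ControllerInfo() (string, error) {
	args := m.Called()
	return args.String(0), args.Error(1)
}

func TestClassicStyle(t *testing.T) {
	m := &StoreMock{}

	// Classic style, which the regenerated mock keeps: the method name is a
	// string and arguments/returns are untyped.
	m.On("ControllerInfo").Return("controller-id", nil)

	// The removed expecter style would instead read roughly
	//   m.EXPECT().ControllerInfo().Return("controller-id", nil)
	// with the method name and return types checked at compile time.

	if id, err := m.ControllerInfo(); err != nil || id != "controller-id" {
		t.Fatalf("unexpected result: %q, %v", id, err)
	}
	m.AssertExpectations(t)
}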
-// CreateEnterprise is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - credentialsName params.ForgeCredentials -// - webhookSecret string -// - poolBalancerType params.PoolBalancerType -func (_e *Store_Expecter) CreateEnterprise(ctx interface{}, name interface{}, credentialsName interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateEnterprise_Call { - return &Store_CreateEnterprise_Call{Call: _e.mock.On("CreateEnterprise", ctx, name, credentialsName, webhookSecret, poolBalancerType)} -} - -func (_c *Store_CreateEnterprise_Call) Run(run func(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateEnterprise_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.ForgeCredentials), args[3].(string), args[4].(params.PoolBalancerType)) - }) - return _c -} - -func (_c *Store_CreateEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_CreateEnterprise_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateEnterprise_Call) RunAndReturn(run func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Enterprise, error)) *Store_CreateEnterprise_Call { - _c.Call.Return(run) - return _c -} - // CreateEntityPool provides a mock function with given fields: ctx, entity, param -func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (params.Pool, error) { +func (_m *Store) CreateEntityPool(ctx context.Context, entity params.GithubEntity, param params.CreatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, param) if len(ret) == 0 { @@ -295,16 +116,16 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreatePoolParams) (params.Pool, error)); ok { return rf(ctx, entity, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreatePoolParams) params.Pool); ok { r0 = rf(ctx, entity, param) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, params.CreatePoolParams) error); ok { r1 = rf(ctx, entity, param) } else { r1 = ret.Error(1) @@ -313,225 +134,23 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity return r0, r1 } -// Store_CreateEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityPool' -type Store_CreateEntityPool_Call struct { - *mock.Call -} - -// CreateEntityPool is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -// - param params.CreatePoolParams -func (_e *Store_Expecter) CreateEntityPool(ctx interface{}, entity interface{}, param interface{}) *Store_CreateEntityPool_Call { - return &Store_CreateEntityPool_Call{Call: _e.mock.On("CreateEntityPool", ctx, entity, param)} -} - -func (_c *Store_CreateEntityPool_Call) Run(run func(ctx 
context.Context, entity params.ForgeEntity, param params.CreatePoolParams)) *Store_CreateEntityPool_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.CreatePoolParams)) - }) - return _c -} - -func (_c *Store_CreateEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_CreateEntityPool_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.CreatePoolParams) (params.Pool, error)) *Store_CreateEntityPool_Call { - _c.Call.Return(run) - return _c -} - -// CreateEntityScaleSet provides a mock function with given fields: _a0, entity, param -func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (params.ScaleSet, error) { - ret := _m.Called(_a0, entity, param) - - if len(ret) == 0 { - panic("no return value specified for CreateEntityScaleSet") - } - - var r0 params.ScaleSet - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) (params.ScaleSet, error)); ok { - return rf(_a0, entity, param) - } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) params.ScaleSet); ok { - r0 = rf(_a0, entity, param) - } else { - r0 = ret.Get(0).(params.ScaleSet) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) error); ok { - r1 = rf(_a0, entity, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_CreateEntityScaleSet_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityScaleSet' -type Store_CreateEntityScaleSet_Call struct { - *mock.Call -} - -// CreateEntityScaleSet is a helper method to define mock.On call -// - _a0 context.Context -// - entity params.ForgeEntity -// - param params.CreateScaleSetParams -func (_e *Store_Expecter) CreateEntityScaleSet(_a0 interface{}, entity interface{}, param interface{}) *Store_CreateEntityScaleSet_Call { - return &Store_CreateEntityScaleSet_Call{Call: _e.mock.On("CreateEntityScaleSet", _a0, entity, param)} -} - -func (_c *Store_CreateEntityScaleSet_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams)) *Store_CreateEntityScaleSet_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.CreateScaleSetParams)) - }) - return _c -} - -func (_c *Store_CreateEntityScaleSet_Call) Return(scaleSet params.ScaleSet, err error) *Store_CreateEntityScaleSet_Call { - _c.Call.Return(scaleSet, err) - return _c -} - -func (_c *Store_CreateEntityScaleSet_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) (params.ScaleSet, error)) *Store_CreateEntityScaleSet_Call { - _c.Call.Return(run) - return _c -} - -// CreateGiteaCredentials provides a mock function with given fields: ctx, param -func (_m *Store) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error) { - ret := _m.Called(ctx, param) - - if len(ret) == 0 { - panic("no return value specified for CreateGiteaCredentials") - } - - var r0 params.ForgeCredentials - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error)); ok { - return rf(ctx, param) - } - if rf, ok := 
ret.Get(0).(func(context.Context, params.CreateGiteaCredentialsParams) params.ForgeCredentials); ok { - r0 = rf(ctx, param) - } else { - r0 = ret.Get(0).(params.ForgeCredentials) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.CreateGiteaCredentialsParams) error); ok { - r1 = rf(ctx, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_CreateGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGiteaCredentials' -type Store_CreateGiteaCredentials_Call struct { - *mock.Call -} - -// CreateGiteaCredentials is a helper method to define mock.On call -// - ctx context.Context -// - param params.CreateGiteaCredentialsParams -func (_e *Store_Expecter) CreateGiteaCredentials(ctx interface{}, param interface{}) *Store_CreateGiteaCredentials_Call { - return &Store_CreateGiteaCredentials_Call{Call: _e.mock.On("CreateGiteaCredentials", ctx, param)} -} - -func (_c *Store_CreateGiteaCredentials_Call) Run(run func(ctx context.Context, param params.CreateGiteaCredentialsParams)) *Store_CreateGiteaCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.CreateGiteaCredentialsParams)) - }) - return _c -} - -func (_c *Store_CreateGiteaCredentials_Call) Return(gtCreds params.ForgeCredentials, err error) *Store_CreateGiteaCredentials_Call { - _c.Call.Return(gtCreds, err) - return _c -} - -func (_c *Store_CreateGiteaCredentials_Call) RunAndReturn(run func(context.Context, params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error)) *Store_CreateGiteaCredentials_Call { - _c.Call.Return(run) - return _c -} - -// CreateGiteaEndpoint provides a mock function with given fields: _a0, param -func (_m *Store) CreateGiteaEndpoint(_a0 context.Context, param params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error) { - ret := _m.Called(_a0, param) - - if len(ret) == 0 { - panic("no return value specified for CreateGiteaEndpoint") - } - - var r0 params.ForgeEndpoint - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error)); ok { - return rf(_a0, param) - } - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaEndpointParams) params.ForgeEndpoint); ok { - r0 = rf(_a0, param) - } else { - r0 = ret.Get(0).(params.ForgeEndpoint) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.CreateGiteaEndpointParams) error); ok { - r1 = rf(_a0, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_CreateGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGiteaEndpoint' -type Store_CreateGiteaEndpoint_Call struct { - *mock.Call -} - -// CreateGiteaEndpoint is a helper method to define mock.On call -// - _a0 context.Context -// - param params.CreateGiteaEndpointParams -func (_e *Store_Expecter) CreateGiteaEndpoint(_a0 interface{}, param interface{}) *Store_CreateGiteaEndpoint_Call { - return &Store_CreateGiteaEndpoint_Call{Call: _e.mock.On("CreateGiteaEndpoint", _a0, param)} -} - -func (_c *Store_CreateGiteaEndpoint_Call) Run(run func(_a0 context.Context, param params.CreateGiteaEndpointParams)) *Store_CreateGiteaEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.CreateGiteaEndpointParams)) - }) - return _c -} - -func (_c *Store_CreateGiteaEndpoint_Call) Return(ghEndpoint params.ForgeEndpoint, err error) *Store_CreateGiteaEndpoint_Call { - 
_c.Call.Return(ghEndpoint, err) - return _c -} - -func (_c *Store_CreateGiteaEndpoint_Call) RunAndReturn(run func(context.Context, params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error)) *Store_CreateGiteaEndpoint_Call { - _c.Call.Return(run) - return _c -} - // CreateGithubCredentials provides a mock function with given fields: ctx, param -func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) { +func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) { ret := _m.Called(ctx, param) if len(ret) == 0 { panic("no return value specified for CreateGithubCredentials") } - var r0 params.ForgeCredentials + var r0 params.GithubCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) (params.ForgeCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) (params.GithubCredentials, error)); ok { return rf(ctx, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) params.ForgeCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) params.GithubCredentials); ok { r0 = rf(ctx, param) } else { - r0 = ret.Get(0).(params.ForgeCredentials) + r0 = ret.Get(0).(params.GithubCredentials) } if rf, ok := ret.Get(1).(func(context.Context, params.CreateGithubCredentialsParams) error); ok { @@ -543,52 +162,23 @@ func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.Creat return r0, r1 } -// Store_CreateGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGithubCredentials' -type Store_CreateGithubCredentials_Call struct { - *mock.Call -} - -// CreateGithubCredentials is a helper method to define mock.On call -// - ctx context.Context -// - param params.CreateGithubCredentialsParams -func (_e *Store_Expecter) CreateGithubCredentials(ctx interface{}, param interface{}) *Store_CreateGithubCredentials_Call { - return &Store_CreateGithubCredentials_Call{Call: _e.mock.On("CreateGithubCredentials", ctx, param)} -} - -func (_c *Store_CreateGithubCredentials_Call) Run(run func(ctx context.Context, param params.CreateGithubCredentialsParams)) *Store_CreateGithubCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.CreateGithubCredentialsParams)) - }) - return _c -} - -func (_c *Store_CreateGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_CreateGithubCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateGithubCredentials_Call) RunAndReturn(run func(context.Context, params.CreateGithubCredentialsParams) (params.ForgeCredentials, error)) *Store_CreateGithubCredentials_Call { - _c.Call.Return(run) - return _c -} - // CreateGithubEndpoint provides a mock function with given fields: ctx, param -func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) { +func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) { ret := _m.Called(ctx, param) if len(ret) == 0 { panic("no return value specified for CreateGithubEndpoint") } - var r0 params.ForgeEndpoint + var r0 params.GithubEndpoint var r1 error - if rf, ok := 
ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) (params.ForgeEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) (params.GithubEndpoint, error)); ok { return rf(ctx, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) params.ForgeEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) params.GithubEndpoint); ok { r0 = rf(ctx, param) } else { - r0 = ret.Get(0).(params.ForgeEndpoint) + r0 = ret.Get(0).(params.GithubEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, params.CreateGithubEndpointParams) error); ok { @@ -600,35 +190,6 @@ func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGi return r0, r1 } -// Store_CreateGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGithubEndpoint' -type Store_CreateGithubEndpoint_Call struct { - *mock.Call -} - -// CreateGithubEndpoint is a helper method to define mock.On call -// - ctx context.Context -// - param params.CreateGithubEndpointParams -func (_e *Store_Expecter) CreateGithubEndpoint(ctx interface{}, param interface{}) *Store_CreateGithubEndpoint_Call { - return &Store_CreateGithubEndpoint_Call{Call: _e.mock.On("CreateGithubEndpoint", ctx, param)} -} - -func (_c *Store_CreateGithubEndpoint_Call) Run(run func(ctx context.Context, param params.CreateGithubEndpointParams)) *Store_CreateGithubEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.CreateGithubEndpointParams)) - }) - return _c -} - -func (_c *Store_CreateGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_CreateGithubEndpoint_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateGithubEndpoint_Call) RunAndReturn(run func(context.Context, params.CreateGithubEndpointParams) (params.ForgeEndpoint, error)) *Store_CreateGithubEndpoint_Call { - _c.Call.Return(run) - return _c -} - // CreateInstance provides a mock function with given fields: ctx, poolID, param func (_m *Store) CreateInstance(ctx context.Context, poolID string, param params.CreateInstanceParams) (params.Instance, error) { ret := _m.Called(ctx, poolID, param) @@ -657,36 +218,6 @@ func (_m *Store) CreateInstance(ctx context.Context, poolID string, param params return r0, r1 } -// Store_CreateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateInstance' -type Store_CreateInstance_Call struct { - *mock.Call -} - -// CreateInstance is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -// - param params.CreateInstanceParams -func (_e *Store_Expecter) CreateInstance(ctx interface{}, poolID interface{}, param interface{}) *Store_CreateInstance_Call { - return &Store_CreateInstance_Call{Call: _e.mock.On("CreateInstance", ctx, poolID, param)} -} - -func (_c *Store_CreateInstance_Call) Run(run func(ctx context.Context, poolID string, param params.CreateInstanceParams)) *Store_CreateInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.CreateInstanceParams)) - }) - return _c -} - -func (_c *Store_CreateInstance_Call) Return(_a0 params.Instance, _a1 error) *Store_CreateInstance_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateInstance_Call) RunAndReturn(run func(context.Context, string, 
params.CreateInstanceParams) (params.Instance, error)) *Store_CreateInstance_Call { - _c.Call.Return(run) - return _c -} - // CreateOrUpdateJob provides a mock function with given fields: ctx, job func (_m *Store) CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) { ret := _m.Called(ctx, job) @@ -715,38 +246,9 @@ func (_m *Store) CreateOrUpdateJob(ctx context.Context, job params.Job) (params. return r0, r1 } -// Store_CreateOrUpdateJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrUpdateJob' -type Store_CreateOrUpdateJob_Call struct { - *mock.Call -} - -// CreateOrUpdateJob is a helper method to define mock.On call -// - ctx context.Context -// - job params.Job -func (_e *Store_Expecter) CreateOrUpdateJob(ctx interface{}, job interface{}) *Store_CreateOrUpdateJob_Call { - return &Store_CreateOrUpdateJob_Call{Call: _e.mock.On("CreateOrUpdateJob", ctx, job)} -} - -func (_c *Store_CreateOrUpdateJob_Call) Run(run func(ctx context.Context, job params.Job)) *Store_CreateOrUpdateJob_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.Job)) - }) - return _c -} - -func (_c *Store_CreateOrUpdateJob_Call) Return(_a0 params.Job, _a1 error) *Store_CreateOrUpdateJob_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateOrUpdateJob_Call) RunAndReturn(run func(context.Context, params.Job) (params.Job, error)) *Store_CreateOrUpdateJob_Call { - _c.Call.Return(run) - return _c -} - -// CreateOrganization provides a mock function with given fields: ctx, name, credentials, webhookSecret, poolBalancerType -func (_m *Store) CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) { - ret := _m.Called(ctx, name, credentials, webhookSecret, poolBalancerType) +// CreateOrganization provides a mock function with given fields: ctx, name, credentialsName, webhookSecret, poolBalancerType +func (_m *Store) CreateOrganization(ctx context.Context, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) { + ret := _m.Called(ctx, name, credentialsName, webhookSecret, poolBalancerType) if len(ret) == 0 { panic("no return value specified for CreateOrganization") @@ -754,17 +256,17 @@ func (_m *Store) CreateOrganization(ctx context.Context, name string, credential var r0 params.Organization var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Organization, error)); ok { - return rf(ctx, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) (params.Organization, error)); ok { + return rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } - if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Organization); ok { - r0 = rf(ctx, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) params.Organization); ok { + r0 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Organization) } - if rf, ok := ret.Get(1).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) error); 
ok { - r1 = rf(ctx, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, params.PoolBalancerType) error); ok { + r1 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) } @@ -772,41 +274,9 @@ func (_m *Store) CreateOrganization(ctx context.Context, name string, credential return r0, r1 } -// Store_CreateOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrganization' -type Store_CreateOrganization_Call struct { - *mock.Call -} - -// CreateOrganization is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - credentials params.ForgeCredentials -// - webhookSecret string -// - poolBalancerType params.PoolBalancerType -func (_e *Store_Expecter) CreateOrganization(ctx interface{}, name interface{}, credentials interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateOrganization_Call { - return &Store_CreateOrganization_Call{Call: _e.mock.On("CreateOrganization", ctx, name, credentials, webhookSecret, poolBalancerType)} -} - -func (_c *Store_CreateOrganization_Call) Run(run func(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateOrganization_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.ForgeCredentials), args[3].(string), args[4].(params.PoolBalancerType)) - }) - return _c -} - -func (_c *Store_CreateOrganization_Call) Return(org params.Organization, err error) *Store_CreateOrganization_Call { - _c.Call.Return(org, err) - return _c -} - -func (_c *Store_CreateOrganization_Call) RunAndReturn(run func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Organization, error)) *Store_CreateOrganization_Call { - _c.Call.Return(run) - return _c -} - -// CreateRepository provides a mock function with given fields: ctx, owner, name, credentials, webhookSecret, poolBalancerType -func (_m *Store) CreateRepository(ctx context.Context, owner string, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) { - ret := _m.Called(ctx, owner, name, credentials, webhookSecret, poolBalancerType) +// CreateRepository provides a mock function with given fields: ctx, owner, name, credentialsName, webhookSecret, poolBalancerType +func (_m *Store) CreateRepository(ctx context.Context, owner string, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) { + ret := _m.Called(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) if len(ret) == 0 { panic("no return value specified for CreateRepository") @@ -814,17 +284,17 @@ func (_m *Store) CreateRepository(ctx context.Context, owner string, name string var r0 params.Repository var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Repository, error)); ok { - return rf(ctx, owner, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, params.PoolBalancerType) (params.Repository, error)); ok { + return rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) } - if rf, ok := 
ret.Get(0).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Repository); ok { - r0 = rf(ctx, owner, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, params.PoolBalancerType) params.Repository); ok { + r0 = rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Repository) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) error); ok { - r1 = rf(ctx, owner, name, credentials, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, params.PoolBalancerType) error); ok { + r1 = rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) } @@ -832,97 +302,6 @@ func (_m *Store) CreateRepository(ctx context.Context, owner string, name string return r0, r1 } -// Store_CreateRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateRepository' -type Store_CreateRepository_Call struct { - *mock.Call -} - -// CreateRepository is a helper method to define mock.On call -// - ctx context.Context -// - owner string -// - name string -// - credentials params.ForgeCredentials -// - webhookSecret string -// - poolBalancerType params.PoolBalancerType -func (_e *Store_Expecter) CreateRepository(ctx interface{}, owner interface{}, name interface{}, credentials interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateRepository_Call { - return &Store_CreateRepository_Call{Call: _e.mock.On("CreateRepository", ctx, owner, name, credentials, webhookSecret, poolBalancerType)} -} - -func (_c *Store_CreateRepository_Call) Run(run func(ctx context.Context, owner string, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateRepository_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(params.ForgeCredentials), args[4].(string), args[5].(params.PoolBalancerType)) - }) - return _c -} - -func (_c *Store_CreateRepository_Call) Return(param params.Repository, err error) *Store_CreateRepository_Call { - _c.Call.Return(param, err) - return _c -} - -func (_c *Store_CreateRepository_Call) RunAndReturn(run func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Repository, error)) *Store_CreateRepository_Call { - _c.Call.Return(run) - return _c -} - -// CreateScaleSetInstance provides a mock function with given fields: _a0, scaleSetID, param -func (_m *Store) CreateScaleSetInstance(_a0 context.Context, scaleSetID uint, param params.CreateInstanceParams) (params.Instance, error) { - ret := _m.Called(_a0, scaleSetID, param) - - if len(ret) == 0 { - panic("no return value specified for CreateScaleSetInstance") - } - - var r0 params.Instance - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, params.CreateInstanceParams) (params.Instance, error)); ok { - return rf(_a0, scaleSetID, param) - } - if rf, ok := ret.Get(0).(func(context.Context, uint, params.CreateInstanceParams) params.Instance); ok { - r0 = rf(_a0, scaleSetID, param) - } else { - r0 = ret.Get(0).(params.Instance) - } - - if rf, ok := ret.Get(1).(func(context.Context, uint, params.CreateInstanceParams) error); ok { - r1 = rf(_a0, 
scaleSetID, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_CreateScaleSetInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateScaleSetInstance' -type Store_CreateScaleSetInstance_Call struct { - *mock.Call -} - -// CreateScaleSetInstance is a helper method to define mock.On call -// - _a0 context.Context -// - scaleSetID uint -// - param params.CreateInstanceParams -func (_e *Store_Expecter) CreateScaleSetInstance(_a0 interface{}, scaleSetID interface{}, param interface{}) *Store_CreateScaleSetInstance_Call { - return &Store_CreateScaleSetInstance_Call{Call: _e.mock.On("CreateScaleSetInstance", _a0, scaleSetID, param)} -} - -func (_c *Store_CreateScaleSetInstance_Call) Run(run func(_a0 context.Context, scaleSetID uint, param params.CreateInstanceParams)) *Store_CreateScaleSetInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(params.CreateInstanceParams)) - }) - return _c -} - -func (_c *Store_CreateScaleSetInstance_Call) Return(instance params.Instance, err error) *Store_CreateScaleSetInstance_Call { - _c.Call.Return(instance, err) - return _c -} - -func (_c *Store_CreateScaleSetInstance_Call) RunAndReturn(run func(context.Context, uint, params.CreateInstanceParams) (params.Instance, error)) *Store_CreateScaleSetInstance_Call { - _c.Call.Return(run) - return _c -} - // CreateUser provides a mock function with given fields: ctx, user func (_m *Store) CreateUser(ctx context.Context, user params.NewUserParams) (params.User, error) { ret := _m.Called(ctx, user) @@ -951,35 +330,6 @@ func (_m *Store) CreateUser(ctx context.Context, user params.NewUserParams) (par return r0, r1 } -// Store_CreateUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateUser' -type Store_CreateUser_Call struct { - *mock.Call -} - -// CreateUser is a helper method to define mock.On call -// - ctx context.Context -// - user params.NewUserParams -func (_e *Store_Expecter) CreateUser(ctx interface{}, user interface{}) *Store_CreateUser_Call { - return &Store_CreateUser_Call{Call: _e.mock.On("CreateUser", ctx, user)} -} - -func (_c *Store_CreateUser_Call) Run(run func(ctx context.Context, user params.NewUserParams)) *Store_CreateUser_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.NewUserParams)) - }) - return _c -} - -func (_c *Store_CreateUser_Call) Return(_a0 params.User, _a1 error) *Store_CreateUser_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_CreateUser_Call) RunAndReturn(run func(context.Context, params.NewUserParams) (params.User, error)) *Store_CreateUser_Call { - _c.Call.Return(run) - return _c -} - // DeleteCompletedJobs provides a mock function with given fields: ctx func (_m *Store) DeleteCompletedJobs(ctx context.Context) error { ret := _m.Called(ctx) @@ -998,34 +348,6 @@ func (_m *Store) DeleteCompletedJobs(ctx context.Context) error { return r0 } -// Store_DeleteCompletedJobs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteCompletedJobs' -type Store_DeleteCompletedJobs_Call struct { - *mock.Call -} - -// DeleteCompletedJobs is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) DeleteCompletedJobs(ctx interface{}) *Store_DeleteCompletedJobs_Call { - return &Store_DeleteCompletedJobs_Call{Call: _e.mock.On("DeleteCompletedJobs", ctx)} -} - -func (_c 
*Store_DeleteCompletedJobs_Call) Run(run func(ctx context.Context)) *Store_DeleteCompletedJobs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_DeleteCompletedJobs_Call) Return(_a0 error) *Store_DeleteCompletedJobs_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteCompletedJobs_Call) RunAndReturn(run func(context.Context) error) *Store_DeleteCompletedJobs_Call { - _c.Call.Return(run) - return _c -} - // DeleteEnterprise provides a mock function with given fields: ctx, enterpriseID func (_m *Store) DeleteEnterprise(ctx context.Context, enterpriseID string) error { ret := _m.Called(ctx, enterpriseID) @@ -1044,37 +366,8 @@ func (_m *Store) DeleteEnterprise(ctx context.Context, enterpriseID string) erro return r0 } -// Store_DeleteEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEnterprise' -type Store_DeleteEnterprise_Call struct { - *mock.Call -} - -// DeleteEnterprise is a helper method to define mock.On call -// - ctx context.Context -// - enterpriseID string -func (_e *Store_Expecter) DeleteEnterprise(ctx interface{}, enterpriseID interface{}) *Store_DeleteEnterprise_Call { - return &Store_DeleteEnterprise_Call{Call: _e.mock.On("DeleteEnterprise", ctx, enterpriseID)} -} - -func (_c *Store_DeleteEnterprise_Call) Run(run func(ctx context.Context, enterpriseID string)) *Store_DeleteEnterprise_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteEnterprise_Call) Return(_a0 error) *Store_DeleteEnterprise_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteEnterprise_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteEnterprise_Call { - _c.Call.Return(run) - return _c -} - // DeleteEntityPool provides a mock function with given fields: ctx, entity, poolID -func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) error { +func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) error { ret := _m.Called(ctx, entity, poolID) if len(ret) == 0 { @@ -1082,7 +375,7 @@ func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) error); ok { r0 = rf(ctx, entity, poolID) } else { r0 = ret.Error(0) @@ -1091,130 +384,6 @@ func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity return r0 } -// Store_DeleteEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityPool' -type Store_DeleteEntityPool_Call struct { - *mock.Call -} - -// DeleteEntityPool is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -// - poolID string -func (_e *Store_Expecter) DeleteEntityPool(ctx interface{}, entity interface{}, poolID interface{}) *Store_DeleteEntityPool_Call { - return &Store_DeleteEntityPool_Call{Call: _e.mock.On("DeleteEntityPool", ctx, entity, poolID)} -} - -func (_c *Store_DeleteEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string)) *Store_DeleteEntityPool_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string)) - }) - 
return _c -} - -func (_c *Store_DeleteEntityPool_Call) Return(_a0 error) *Store_DeleteEntityPool_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string) error) *Store_DeleteEntityPool_Call { - _c.Call.Return(run) - return _c -} - -// DeleteGiteaCredentials provides a mock function with given fields: ctx, id -func (_m *Store) DeleteGiteaCredentials(ctx context.Context, id uint) error { - ret := _m.Called(ctx, id) - - if len(ret) == 0 { - panic("no return value specified for DeleteGiteaCredentials") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint) error); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_DeleteGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGiteaCredentials' -type Store_DeleteGiteaCredentials_Call struct { - *mock.Call -} - -// DeleteGiteaCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -func (_e *Store_Expecter) DeleteGiteaCredentials(ctx interface{}, id interface{}) *Store_DeleteGiteaCredentials_Call { - return &Store_DeleteGiteaCredentials_Call{Call: _e.mock.On("DeleteGiteaCredentials", ctx, id)} -} - -func (_c *Store_DeleteGiteaCredentials_Call) Run(run func(ctx context.Context, id uint)) *Store_DeleteGiteaCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint)) - }) - return _c -} - -func (_c *Store_DeleteGiteaCredentials_Call) Return(err error) *Store_DeleteGiteaCredentials_Call { - _c.Call.Return(err) - return _c -} - -func (_c *Store_DeleteGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteGiteaCredentials_Call { - _c.Call.Return(run) - return _c -} - -// DeleteGiteaEndpoint provides a mock function with given fields: _a0, name -func (_m *Store) DeleteGiteaEndpoint(_a0 context.Context, name string) error { - ret := _m.Called(_a0, name) - - if len(ret) == 0 { - panic("no return value specified for DeleteGiteaEndpoint") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(_a0, name) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_DeleteGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGiteaEndpoint' -type Store_DeleteGiteaEndpoint_Call struct { - *mock.Call -} - -// DeleteGiteaEndpoint is a helper method to define mock.On call -// - _a0 context.Context -// - name string -func (_e *Store_Expecter) DeleteGiteaEndpoint(_a0 interface{}, name interface{}) *Store_DeleteGiteaEndpoint_Call { - return &Store_DeleteGiteaEndpoint_Call{Call: _e.mock.On("DeleteGiteaEndpoint", _a0, name)} -} - -func (_c *Store_DeleteGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string)) *Store_DeleteGiteaEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteGiteaEndpoint_Call) Return(err error) *Store_DeleteGiteaEndpoint_Call { - _c.Call.Return(err) - return _c -} - -func (_c *Store_DeleteGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteGiteaEndpoint_Call { - _c.Call.Return(run) - return _c -} - // DeleteGithubCredentials provides a mock function with given fields: ctx, id func (_m *Store) DeleteGithubCredentials(ctx context.Context, id uint) error { ret := 
_m.Called(ctx, id) @@ -1233,35 +402,6 @@ func (_m *Store) DeleteGithubCredentials(ctx context.Context, id uint) error { return r0 } -// Store_DeleteGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGithubCredentials' -type Store_DeleteGithubCredentials_Call struct { - *mock.Call -} - -// DeleteGithubCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -func (_e *Store_Expecter) DeleteGithubCredentials(ctx interface{}, id interface{}) *Store_DeleteGithubCredentials_Call { - return &Store_DeleteGithubCredentials_Call{Call: _e.mock.On("DeleteGithubCredentials", ctx, id)} -} - -func (_c *Store_DeleteGithubCredentials_Call) Run(run func(ctx context.Context, id uint)) *Store_DeleteGithubCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint)) - }) - return _c -} - -func (_c *Store_DeleteGithubCredentials_Call) Return(_a0 error) *Store_DeleteGithubCredentials_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteGithubCredentials_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteGithubCredentials_Call { - _c.Call.Return(run) - return _c -} - // DeleteGithubEndpoint provides a mock function with given fields: ctx, name func (_m *Store) DeleteGithubEndpoint(ctx context.Context, name string) error { ret := _m.Called(ctx, name) @@ -1280,38 +420,9 @@ func (_m *Store) DeleteGithubEndpoint(ctx context.Context, name string) error { return r0 } -// Store_DeleteGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGithubEndpoint' -type Store_DeleteGithubEndpoint_Call struct { - *mock.Call -} - -// DeleteGithubEndpoint is a helper method to define mock.On call -// - ctx context.Context -// - name string -func (_e *Store_Expecter) DeleteGithubEndpoint(ctx interface{}, name interface{}) *Store_DeleteGithubEndpoint_Call { - return &Store_DeleteGithubEndpoint_Call{Call: _e.mock.On("DeleteGithubEndpoint", ctx, name)} -} - -func (_c *Store_DeleteGithubEndpoint_Call) Run(run func(ctx context.Context, name string)) *Store_DeleteGithubEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteGithubEndpoint_Call) Return(_a0 error) *Store_DeleteGithubEndpoint_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteGithubEndpoint_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteGithubEndpoint_Call { - _c.Call.Return(run) - return _c -} - -// DeleteInstance provides a mock function with given fields: ctx, poolID, instanceNameOrID -func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceNameOrID string) error { - ret := _m.Called(ctx, poolID, instanceNameOrID) +// DeleteInstance provides a mock function with given fields: ctx, poolID, instanceName +func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceName string) error { + ret := _m.Called(ctx, poolID, instanceName) if len(ret) == 0 { panic("no return value specified for DeleteInstance") @@ -1319,7 +430,7 @@ func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceName var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok { - r0 = rf(ctx, poolID, instanceNameOrID) + r0 = rf(ctx, poolID, instanceName) } else { r0 = ret.Error(0) } @@ -1327,83 +438,6 @@ func (_m *Store) DeleteInstance(ctx 
context.Context, poolID string, instanceName return r0 } -// Store_DeleteInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstance' -type Store_DeleteInstance_Call struct { - *mock.Call -} - -// DeleteInstance is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -// - instanceNameOrID string -func (_e *Store_Expecter) DeleteInstance(ctx interface{}, poolID interface{}, instanceNameOrID interface{}) *Store_DeleteInstance_Call { - return &Store_DeleteInstance_Call{Call: _e.mock.On("DeleteInstance", ctx, poolID, instanceNameOrID)} -} - -func (_c *Store_DeleteInstance_Call) Run(run func(ctx context.Context, poolID string, instanceNameOrID string)) *Store_DeleteInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string)) - }) - return _c -} - -func (_c *Store_DeleteInstance_Call) Return(_a0 error) *Store_DeleteInstance_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteInstance_Call) RunAndReturn(run func(context.Context, string, string) error) *Store_DeleteInstance_Call { - _c.Call.Return(run) - return _c -} - -// DeleteInstanceByName provides a mock function with given fields: ctx, instanceName -func (_m *Store) DeleteInstanceByName(ctx context.Context, instanceName string) error { - ret := _m.Called(ctx, instanceName) - - if len(ret) == 0 { - panic("no return value specified for DeleteInstanceByName") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instanceName) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_DeleteInstanceByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstanceByName' -type Store_DeleteInstanceByName_Call struct { - *mock.Call -} - -// DeleteInstanceByName is a helper method to define mock.On call -// - ctx context.Context -// - instanceName string -func (_e *Store_Expecter) DeleteInstanceByName(ctx interface{}, instanceName interface{}) *Store_DeleteInstanceByName_Call { - return &Store_DeleteInstanceByName_Call{Call: _e.mock.On("DeleteInstanceByName", ctx, instanceName)} -} - -func (_c *Store_DeleteInstanceByName_Call) Run(run func(ctx context.Context, instanceName string)) *Store_DeleteInstanceByName_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteInstanceByName_Call) Return(_a0 error) *Store_DeleteInstanceByName_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteInstanceByName_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteInstanceByName_Call { - _c.Call.Return(run) - return _c -} - // DeleteJob provides a mock function with given fields: ctx, jobID func (_m *Store) DeleteJob(ctx context.Context, jobID int64) error { ret := _m.Called(ctx, jobID) @@ -1422,35 +456,6 @@ func (_m *Store) DeleteJob(ctx context.Context, jobID int64) error { return r0 } -// Store_DeleteJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteJob' -type Store_DeleteJob_Call struct { - *mock.Call -} - -// DeleteJob is a helper method to define mock.On call -// - ctx context.Context -// - jobID int64 -func (_e *Store_Expecter) DeleteJob(ctx interface{}, jobID interface{}) *Store_DeleteJob_Call { - return &Store_DeleteJob_Call{Call: _e.mock.On("DeleteJob", ctx, jobID)} -} - -func (_c *Store_DeleteJob_Call) 
Run(run func(ctx context.Context, jobID int64)) *Store_DeleteJob_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *Store_DeleteJob_Call) Return(_a0 error) *Store_DeleteJob_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteJob_Call) RunAndReturn(run func(context.Context, int64) error) *Store_DeleteJob_Call { - _c.Call.Return(run) - return _c -} - // DeleteOrganization provides a mock function with given fields: ctx, orgID func (_m *Store) DeleteOrganization(ctx context.Context, orgID string) error { ret := _m.Called(ctx, orgID) @@ -1469,35 +474,6 @@ func (_m *Store) DeleteOrganization(ctx context.Context, orgID string) error { return r0 } -// Store_DeleteOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteOrganization' -type Store_DeleteOrganization_Call struct { - *mock.Call -} - -// DeleteOrganization is a helper method to define mock.On call -// - ctx context.Context -// - orgID string -func (_e *Store_Expecter) DeleteOrganization(ctx interface{}, orgID interface{}) *Store_DeleteOrganization_Call { - return &Store_DeleteOrganization_Call{Call: _e.mock.On("DeleteOrganization", ctx, orgID)} -} - -func (_c *Store_DeleteOrganization_Call) Run(run func(ctx context.Context, orgID string)) *Store_DeleteOrganization_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteOrganization_Call) Return(_a0 error) *Store_DeleteOrganization_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteOrganization_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteOrganization_Call { - _c.Call.Return(run) - return _c -} - // DeletePoolByID provides a mock function with given fields: ctx, poolID func (_m *Store) DeletePoolByID(ctx context.Context, poolID string) error { ret := _m.Called(ctx, poolID) @@ -1516,35 +492,6 @@ func (_m *Store) DeletePoolByID(ctx context.Context, poolID string) error { return r0 } -// Store_DeletePoolByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeletePoolByID' -type Store_DeletePoolByID_Call struct { - *mock.Call -} - -// DeletePoolByID is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -func (_e *Store_Expecter) DeletePoolByID(ctx interface{}, poolID interface{}) *Store_DeletePoolByID_Call { - return &Store_DeletePoolByID_Call{Call: _e.mock.On("DeletePoolByID", ctx, poolID)} -} - -func (_c *Store_DeletePoolByID_Call) Run(run func(ctx context.Context, poolID string)) *Store_DeletePoolByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeletePoolByID_Call) Return(_a0 error) *Store_DeletePoolByID_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeletePoolByID_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeletePoolByID_Call { - _c.Call.Return(run) - return _c -} - // DeleteRepository provides a mock function with given fields: ctx, repoID func (_m *Store) DeleteRepository(ctx context.Context, repoID string) error { ret := _m.Called(ctx, repoID) @@ -1563,84 +510,8 @@ func (_m *Store) DeleteRepository(ctx context.Context, repoID string) error { return r0 } -// Store_DeleteRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteRepository' -type 
Store_DeleteRepository_Call struct { - *mock.Call -} - -// DeleteRepository is a helper method to define mock.On call -// - ctx context.Context -// - repoID string -func (_e *Store_Expecter) DeleteRepository(ctx interface{}, repoID interface{}) *Store_DeleteRepository_Call { - return &Store_DeleteRepository_Call{Call: _e.mock.On("DeleteRepository", ctx, repoID)} -} - -func (_c *Store_DeleteRepository_Call) Run(run func(ctx context.Context, repoID string)) *Store_DeleteRepository_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_DeleteRepository_Call) Return(_a0 error) *Store_DeleteRepository_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_DeleteRepository_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteRepository_Call { - _c.Call.Return(run) - return _c -} - -// DeleteScaleSetByID provides a mock function with given fields: ctx, scaleSetID -func (_m *Store) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error { - ret := _m.Called(ctx, scaleSetID) - - if len(ret) == 0 { - panic("no return value specified for DeleteScaleSetByID") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint) error); ok { - r0 = rf(ctx, scaleSetID) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_DeleteScaleSetByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteScaleSetByID' -type Store_DeleteScaleSetByID_Call struct { - *mock.Call -} - -// DeleteScaleSetByID is a helper method to define mock.On call -// - ctx context.Context -// - scaleSetID uint -func (_e *Store_Expecter) DeleteScaleSetByID(ctx interface{}, scaleSetID interface{}) *Store_DeleteScaleSetByID_Call { - return &Store_DeleteScaleSetByID_Call{Call: _e.mock.On("DeleteScaleSetByID", ctx, scaleSetID)} -} - -func (_c *Store_DeleteScaleSetByID_Call) Run(run func(ctx context.Context, scaleSetID uint)) *Store_DeleteScaleSetByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint)) - }) - return _c -} - -func (_c *Store_DeleteScaleSetByID_Call) Return(err error) *Store_DeleteScaleSetByID_Call { - _c.Call.Return(err) - return _c -} - -func (_c *Store_DeleteScaleSetByID_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteScaleSetByID_Call { - _c.Call.Return(run) - return _c -} - // FindPoolsMatchingAllTags provides a mock function with given fields: ctx, entityType, entityID, tags -func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) { +func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) { ret := _m.Called(ctx, entityType, entityID, tags) if len(ret) == 0 { @@ -1649,10 +520,10 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params var r0 []params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, []string) ([]params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, []string) ([]params.Pool, error)); ok { return rf(ctx, entityType, entityID, tags) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, []string) []params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, []string) 
[]params.Pool); ok { r0 = rf(ctx, entityType, entityID, tags) } else { if ret.Get(0) != nil { @@ -1660,7 +531,7 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params } } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string, []string) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string, []string) error); ok { r1 = rf(ctx, entityType, entityID, tags) } else { r1 = ret.Error(1) @@ -1669,37 +540,6 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params return r0, r1 } -// Store_FindPoolsMatchingAllTags_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FindPoolsMatchingAllTags' -type Store_FindPoolsMatchingAllTags_Call struct { - *mock.Call -} - -// FindPoolsMatchingAllTags is a helper method to define mock.On call -// - ctx context.Context -// - entityType params.ForgeEntityType -// - entityID string -// - tags []string -func (_e *Store_Expecter) FindPoolsMatchingAllTags(ctx interface{}, entityType interface{}, entityID interface{}, tags interface{}) *Store_FindPoolsMatchingAllTags_Call { - return &Store_FindPoolsMatchingAllTags_Call{Call: _e.mock.On("FindPoolsMatchingAllTags", ctx, entityType, entityID, tags)} -} - -func (_c *Store_FindPoolsMatchingAllTags_Call) Run(run func(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string)) *Store_FindPoolsMatchingAllTags_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string), args[3].([]string)) - }) - return _c -} - -func (_c *Store_FindPoolsMatchingAllTags_Call) Return(_a0 []params.Pool, _a1 error) *Store_FindPoolsMatchingAllTags_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_FindPoolsMatchingAllTags_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string, []string) ([]params.Pool, error)) *Store_FindPoolsMatchingAllTags_Call { - _c.Call.Return(run) - return _c -} - // GetAdminUser provides a mock function with given fields: ctx func (_m *Store) GetAdminUser(ctx context.Context) (params.User, error) { ret := _m.Called(ctx) @@ -1728,34 +568,6 @@ func (_m *Store) GetAdminUser(ctx context.Context) (params.User, error) { return r0, r1 } -// Store_GetAdminUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAdminUser' -type Store_GetAdminUser_Call struct { - *mock.Call -} - -// GetAdminUser is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) GetAdminUser(ctx interface{}) *Store_GetAdminUser_Call { - return &Store_GetAdminUser_Call{Call: _e.mock.On("GetAdminUser", ctx)} -} - -func (_c *Store_GetAdminUser_Call) Run(run func(ctx context.Context)) *Store_GetAdminUser_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_GetAdminUser_Call) Return(_a0 params.User, _a1 error) *Store_GetAdminUser_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetAdminUser_Call) RunAndReturn(run func(context.Context) (params.User, error)) *Store_GetAdminUser_Call { - _c.Call.Return(run) - return _c -} - // GetEnterprise provides a mock function with given fields: ctx, name, endpointName func (_m *Store) GetEnterprise(ctx context.Context, name string, endpointName string) (params.Enterprise, error) { ret := _m.Called(ctx, name, endpointName) @@ -1784,36 +596,6 @@ func (_m *Store) 
GetEnterprise(ctx context.Context, name string, endpointName st return r0, r1 } -// Store_GetEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprise' -type Store_GetEnterprise_Call struct { - *mock.Call -} - -// GetEnterprise is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - endpointName string -func (_e *Store_Expecter) GetEnterprise(ctx interface{}, name interface{}, endpointName interface{}) *Store_GetEnterprise_Call { - return &Store_GetEnterprise_Call{Call: _e.mock.On("GetEnterprise", ctx, name, endpointName)} -} - -func (_c *Store_GetEnterprise_Call) Run(run func(ctx context.Context, name string, endpointName string)) *Store_GetEnterprise_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string)) - }) - return _c -} - -func (_c *Store_GetEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_GetEnterprise_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetEnterprise_Call) RunAndReturn(run func(context.Context, string, string) (params.Enterprise, error)) *Store_GetEnterprise_Call { - _c.Call.Return(run) - return _c -} - // GetEnterpriseByID provides a mock function with given fields: ctx, enterpriseID func (_m *Store) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) { ret := _m.Called(ctx, enterpriseID) @@ -1842,37 +624,8 @@ func (_m *Store) GetEnterpriseByID(ctx context.Context, enterpriseID string) (pa return r0, r1 } -// Store_GetEnterpriseByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterpriseByID' -type Store_GetEnterpriseByID_Call struct { - *mock.Call -} - -// GetEnterpriseByID is a helper method to define mock.On call -// - ctx context.Context -// - enterpriseID string -func (_e *Store_Expecter) GetEnterpriseByID(ctx interface{}, enterpriseID interface{}) *Store_GetEnterpriseByID_Call { - return &Store_GetEnterpriseByID_Call{Call: _e.mock.On("GetEnterpriseByID", ctx, enterpriseID)} -} - -func (_c *Store_GetEnterpriseByID_Call) Run(run func(ctx context.Context, enterpriseID string)) *Store_GetEnterpriseByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetEnterpriseByID_Call) Return(_a0 params.Enterprise, _a1 error) *Store_GetEnterpriseByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetEnterpriseByID_Call) RunAndReturn(run func(context.Context, string) (params.Enterprise, error)) *Store_GetEnterpriseByID_Call { - _c.Call.Return(run) - return _c -} - // GetEntityPool provides a mock function with given fields: ctx, entity, poolID -func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) { +func (_m *Store) GetEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID) if len(ret) == 0 { @@ -1881,16 +634,16 @@ func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, p var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) (params.Pool, error)); ok { return rf(ctx, entity, poolID) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) params.Pool); ok 
{ + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) params.Pool); ok { r0 = rf(ctx, entity, poolID) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, string) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, string) error); ok { r1 = rf(ctx, entity, poolID) } else { r1 = ret.Error(1) @@ -1899,284 +652,23 @@ func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, p return r0, r1 } -// Store_GetEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityPool' -type Store_GetEntityPool_Call struct { - *mock.Call -} - -// GetEntityPool is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -// - poolID string -func (_e *Store_Expecter) GetEntityPool(ctx interface{}, entity interface{}, poolID interface{}) *Store_GetEntityPool_Call { - return &Store_GetEntityPool_Call{Call: _e.mock.On("GetEntityPool", ctx, entity, poolID)} -} - -func (_c *Store_GetEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string)) *Store_GetEntityPool_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string)) - }) - return _c -} - -func (_c *Store_GetEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_GetEntityPool_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string) (params.Pool, error)) *Store_GetEntityPool_Call { - _c.Call.Return(run) - return _c -} - -// GetForgeEntity provides a mock function with given fields: _a0, entityType, entityID -func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { - ret := _m.Called(_a0, entityType, entityID) - - if len(ret) == 0 { - panic("no return value specified for GetForgeEntity") - } - - var r0 params.ForgeEntity - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)); ok { - return rf(_a0, entityType, entityID) - } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) params.ForgeEntity); ok { - r0 = rf(_a0, entityType, entityID) - } else { - r0 = ret.Get(0).(params.ForgeEntity) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string) error); ok { - r1 = rf(_a0, entityType, entityID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_GetForgeEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetForgeEntity' -type Store_GetForgeEntity_Call struct { - *mock.Call -} - -// GetForgeEntity is a helper method to define mock.On call -// - _a0 context.Context -// - entityType params.ForgeEntityType -// - entityID string -func (_e *Store_Expecter) GetForgeEntity(_a0 interface{}, entityType interface{}, entityID interface{}) *Store_GetForgeEntity_Call { - return &Store_GetForgeEntity_Call{Call: _e.mock.On("GetForgeEntity", _a0, entityType, entityID)} -} - -func (_c *Store_GetForgeEntity_Call) Run(run func(_a0 context.Context, entityType params.ForgeEntityType, entityID string)) *Store_GetForgeEntity_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string)) - }) - return _c -} - -func (_c 
*Store_GetForgeEntity_Call) Return(_a0 params.ForgeEntity, _a1 error) *Store_GetForgeEntity_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetForgeEntity_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)) *Store_GetForgeEntity_Call { - _c.Call.Return(run) - return _c -} - -// GetGiteaCredentials provides a mock function with given fields: ctx, id, detailed -func (_m *Store) GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { - ret := _m.Called(ctx, id, detailed) - - if len(ret) == 0 { - panic("no return value specified for GetGiteaCredentials") - } - - var r0 params.ForgeCredentials - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.ForgeCredentials, error)); ok { - return rf(ctx, id, detailed) - } - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.ForgeCredentials); ok { - r0 = rf(ctx, id, detailed) - } else { - r0 = ret.Get(0).(params.ForgeCredentials) - } - - if rf, ok := ret.Get(1).(func(context.Context, uint, bool) error); ok { - r1 = rf(ctx, id, detailed) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_GetGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGiteaCredentials' -type Store_GetGiteaCredentials_Call struct { - *mock.Call -} - -// GetGiteaCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -// - detailed bool -func (_e *Store_Expecter) GetGiteaCredentials(ctx interface{}, id interface{}, detailed interface{}) *Store_GetGiteaCredentials_Call { - return &Store_GetGiteaCredentials_Call{Call: _e.mock.On("GetGiteaCredentials", ctx, id, detailed)} -} - -func (_c *Store_GetGiteaCredentials_Call) Run(run func(ctx context.Context, id uint, detailed bool)) *Store_GetGiteaCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(bool)) - }) - return _c -} - -func (_c *Store_GetGiteaCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGiteaCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint, bool) (params.ForgeCredentials, error)) *Store_GetGiteaCredentials_Call { - _c.Call.Return(run) - return _c -} - -// GetGiteaCredentialsByName provides a mock function with given fields: ctx, name, detailed -func (_m *Store) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { - ret := _m.Called(ctx, name, detailed) - - if len(ret) == 0 { - panic("no return value specified for GetGiteaCredentialsByName") - } - - var r0 params.ForgeCredentials - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, bool) (params.ForgeCredentials, error)); ok { - return rf(ctx, name, detailed) - } - if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.ForgeCredentials); ok { - r0 = rf(ctx, name, detailed) - } else { - r0 = ret.Get(0).(params.ForgeCredentials) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { - r1 = rf(ctx, name, detailed) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_GetGiteaCredentialsByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGiteaCredentialsByName' -type Store_GetGiteaCredentialsByName_Call struct { - *mock.Call -} - -// 
GetGiteaCredentialsByName is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - detailed bool -func (_e *Store_Expecter) GetGiteaCredentialsByName(ctx interface{}, name interface{}, detailed interface{}) *Store_GetGiteaCredentialsByName_Call { - return &Store_GetGiteaCredentialsByName_Call{Call: _e.mock.On("GetGiteaCredentialsByName", ctx, name, detailed)} -} - -func (_c *Store_GetGiteaCredentialsByName_Call) Run(run func(ctx context.Context, name string, detailed bool)) *Store_GetGiteaCredentialsByName_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(bool)) - }) - return _c -} - -func (_c *Store_GetGiteaCredentialsByName_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGiteaCredentialsByName_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGiteaCredentialsByName_Call) RunAndReturn(run func(context.Context, string, bool) (params.ForgeCredentials, error)) *Store_GetGiteaCredentialsByName_Call { - _c.Call.Return(run) - return _c -} - -// GetGiteaEndpoint provides a mock function with given fields: _a0, name -func (_m *Store) GetGiteaEndpoint(_a0 context.Context, name string) (params.ForgeEndpoint, error) { - ret := _m.Called(_a0, name) - - if len(ret) == 0 { - panic("no return value specified for GetGiteaEndpoint") - } - - var r0 params.ForgeEndpoint - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (params.ForgeEndpoint, error)); ok { - return rf(_a0, name) - } - if rf, ok := ret.Get(0).(func(context.Context, string) params.ForgeEndpoint); ok { - r0 = rf(_a0, name) - } else { - r0 = ret.Get(0).(params.ForgeEndpoint) - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(_a0, name) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_GetGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGiteaEndpoint' -type Store_GetGiteaEndpoint_Call struct { - *mock.Call -} - -// GetGiteaEndpoint is a helper method to define mock.On call -// - _a0 context.Context -// - name string -func (_e *Store_Expecter) GetGiteaEndpoint(_a0 interface{}, name interface{}) *Store_GetGiteaEndpoint_Call { - return &Store_GetGiteaEndpoint_Call{Call: _e.mock.On("GetGiteaEndpoint", _a0, name)} -} - -func (_c *Store_GetGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string)) *Store_GetGiteaEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetGiteaEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_GetGiteaEndpoint_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string) (params.ForgeEndpoint, error)) *Store_GetGiteaEndpoint_Call { - _c.Call.Return(run) - return _c -} - // GetGithubCredentials provides a mock function with given fields: ctx, id, detailed -func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { +func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) { ret := _m.Called(ctx, id, detailed) if len(ret) == 0 { panic("no return value specified for GetGithubCredentials") } - var r0 params.ForgeCredentials + var r0 params.GithubCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.ForgeCredentials, 
error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.GithubCredentials, error)); ok { return rf(ctx, id, detailed) } - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.ForgeCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.GithubCredentials); ok { r0 = rf(ctx, id, detailed) } else { - r0 = ret.Get(0).(params.ForgeCredentials) + r0 = ret.Get(0).(params.GithubCredentials) } if rf, ok := ret.Get(1).(func(context.Context, uint, bool) error); ok { @@ -2188,53 +680,23 @@ func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed boo return r0, r1 } -// Store_GetGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubCredentials' -type Store_GetGithubCredentials_Call struct { - *mock.Call -} - -// GetGithubCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -// - detailed bool -func (_e *Store_Expecter) GetGithubCredentials(ctx interface{}, id interface{}, detailed interface{}) *Store_GetGithubCredentials_Call { - return &Store_GetGithubCredentials_Call{Call: _e.mock.On("GetGithubCredentials", ctx, id, detailed)} -} - -func (_c *Store_GetGithubCredentials_Call) Run(run func(ctx context.Context, id uint, detailed bool)) *Store_GetGithubCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(bool)) - }) - return _c -} - -func (_c *Store_GetGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGithubCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGithubCredentials_Call) RunAndReturn(run func(context.Context, uint, bool) (params.ForgeCredentials, error)) *Store_GetGithubCredentials_Call { - _c.Call.Return(run) - return _c -} - // GetGithubCredentialsByName provides a mock function with given fields: ctx, name, detailed -func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { +func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) { ret := _m.Called(ctx, name, detailed) if len(ret) == 0 { panic("no return value specified for GetGithubCredentialsByName") } - var r0 params.ForgeCredentials + var r0 params.GithubCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, bool) (params.ForgeCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, bool) (params.GithubCredentials, error)); ok { return rf(ctx, name, detailed) } - if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.ForgeCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.GithubCredentials); ok { r0 = rf(ctx, name, detailed) } else { - r0 = ret.Get(0).(params.ForgeCredentials) + r0 = ret.Get(0).(params.GithubCredentials) } if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { @@ -2246,53 +708,23 @@ func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, de return r0, r1 } -// Store_GetGithubCredentialsByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubCredentialsByName' -type Store_GetGithubCredentialsByName_Call struct { - *mock.Call -} - -// GetGithubCredentialsByName is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - detailed bool -func 
(_e *Store_Expecter) GetGithubCredentialsByName(ctx interface{}, name interface{}, detailed interface{}) *Store_GetGithubCredentialsByName_Call { - return &Store_GetGithubCredentialsByName_Call{Call: _e.mock.On("GetGithubCredentialsByName", ctx, name, detailed)} -} - -func (_c *Store_GetGithubCredentialsByName_Call) Run(run func(ctx context.Context, name string, detailed bool)) *Store_GetGithubCredentialsByName_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(bool)) - }) - return _c -} - -func (_c *Store_GetGithubCredentialsByName_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGithubCredentialsByName_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGithubCredentialsByName_Call) RunAndReturn(run func(context.Context, string, bool) (params.ForgeCredentials, error)) *Store_GetGithubCredentialsByName_Call { - _c.Call.Return(run) - return _c -} - // GetGithubEndpoint provides a mock function with given fields: ctx, name -func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { +func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) { ret := _m.Called(ctx, name) if len(ret) == 0 { panic("no return value specified for GetGithubEndpoint") } - var r0 params.ForgeEndpoint + var r0 params.GithubEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (params.ForgeEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) (params.GithubEndpoint, error)); ok { return rf(ctx, name) } - if rf, ok := ret.Get(0).(func(context.Context, string) params.ForgeEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) params.GithubEndpoint); ok { r0 = rf(ctx, name) } else { - r0 = ret.Get(0).(params.ForgeEndpoint) + r0 = ret.Get(0).(params.GithubEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -2304,56 +736,27 @@ func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.For return r0, r1 } -// Store_GetGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubEndpoint' -type Store_GetGithubEndpoint_Call struct { - *mock.Call -} - -// GetGithubEndpoint is a helper method to define mock.On call -// - ctx context.Context -// - name string -func (_e *Store_Expecter) GetGithubEndpoint(ctx interface{}, name interface{}) *Store_GetGithubEndpoint_Call { - return &Store_GetGithubEndpoint_Call{Call: _e.mock.On("GetGithubEndpoint", ctx, name)} -} - -func (_c *Store_GetGithubEndpoint_Call) Run(run func(ctx context.Context, name string)) *Store_GetGithubEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_GetGithubEndpoint_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetGithubEndpoint_Call) RunAndReturn(run func(context.Context, string) (params.ForgeEndpoint, error)) *Store_GetGithubEndpoint_Call { - _c.Call.Return(run) - return _c -} - -// GetInstance provides a mock function with given fields: ctx, instanceNameOrID -func (_m *Store) GetInstance(ctx context.Context, instanceNameOrID string) (params.Instance, error) { - ret := _m.Called(ctx, instanceNameOrID) +// GetInstanceByName provides a mock function with given fields: ctx, instanceName +func (_m *Store) GetInstanceByName(ctx 
context.Context, instanceName string) (params.Instance, error) { + ret := _m.Called(ctx, instanceName) if len(ret) == 0 { - panic("no return value specified for GetInstance") + panic("no return value specified for GetInstanceByName") } var r0 params.Instance var r1 error if rf, ok := ret.Get(0).(func(context.Context, string) (params.Instance, error)); ok { - return rf(ctx, instanceNameOrID) + return rf(ctx, instanceName) } if rf, ok := ret.Get(0).(func(context.Context, string) params.Instance); ok { - r0 = rf(ctx, instanceNameOrID) + r0 = rf(ctx, instanceName) } else { r0 = ret.Get(0).(params.Instance) } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, instanceNameOrID) + r1 = rf(ctx, instanceName) } else { r1 = ret.Error(1) } @@ -2361,35 +764,6 @@ func (_m *Store) GetInstance(ctx context.Context, instanceNameOrID string) (para return r0, r1 } -// Store_GetInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInstance' -type Store_GetInstance_Call struct { - *mock.Call -} - -// GetInstance is a helper method to define mock.On call -// - ctx context.Context -// - instanceNameOrID string -func (_e *Store_Expecter) GetInstance(ctx interface{}, instanceNameOrID interface{}) *Store_GetInstance_Call { - return &Store_GetInstance_Call{Call: _e.mock.On("GetInstance", ctx, instanceNameOrID)} -} - -func (_c *Store_GetInstance_Call) Run(run func(ctx context.Context, instanceNameOrID string)) *Store_GetInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetInstance_Call) Return(_a0 params.Instance, _a1 error) *Store_GetInstance_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetInstance_Call) RunAndReturn(run func(context.Context, string) (params.Instance, error)) *Store_GetInstance_Call { - _c.Call.Return(run) - return _c -} - // GetJobByID provides a mock function with given fields: ctx, jobID func (_m *Store) GetJobByID(ctx context.Context, jobID int64) (params.Job, error) { ret := _m.Called(ctx, jobID) @@ -2418,35 +792,6 @@ func (_m *Store) GetJobByID(ctx context.Context, jobID int64) (params.Job, error return r0, r1 } -// Store_GetJobByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetJobByID' -type Store_GetJobByID_Call struct { - *mock.Call -} - -// GetJobByID is a helper method to define mock.On call -// - ctx context.Context -// - jobID int64 -func (_e *Store_Expecter) GetJobByID(ctx interface{}, jobID interface{}) *Store_GetJobByID_Call { - return &Store_GetJobByID_Call{Call: _e.mock.On("GetJobByID", ctx, jobID)} -} - -func (_c *Store_GetJobByID_Call) Run(run func(ctx context.Context, jobID int64)) *Store_GetJobByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *Store_GetJobByID_Call) Return(_a0 params.Job, _a1 error) *Store_GetJobByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetJobByID_Call) RunAndReturn(run func(context.Context, int64) (params.Job, error)) *Store_GetJobByID_Call { - _c.Call.Return(run) - return _c -} - // GetOrganization provides a mock function with given fields: ctx, name, endpointName func (_m *Store) GetOrganization(ctx context.Context, name string, endpointName string) (params.Organization, error) { ret := _m.Called(ctx, name, endpointName) @@ -2475,36 +820,6 @@ func (_m *Store) GetOrganization(ctx context.Context, 
name string, endpointName return r0, r1 } -// Store_GetOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrganization' -type Store_GetOrganization_Call struct { - *mock.Call -} - -// GetOrganization is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - endpointName string -func (_e *Store_Expecter) GetOrganization(ctx interface{}, name interface{}, endpointName interface{}) *Store_GetOrganization_Call { - return &Store_GetOrganization_Call{Call: _e.mock.On("GetOrganization", ctx, name, endpointName)} -} - -func (_c *Store_GetOrganization_Call) Run(run func(ctx context.Context, name string, endpointName string)) *Store_GetOrganization_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string)) - }) - return _c -} - -func (_c *Store_GetOrganization_Call) Return(_a0 params.Organization, _a1 error) *Store_GetOrganization_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetOrganization_Call) RunAndReturn(run func(context.Context, string, string) (params.Organization, error)) *Store_GetOrganization_Call { - _c.Call.Return(run) - return _c -} - // GetOrganizationByID provides a mock function with given fields: ctx, orgID func (_m *Store) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { ret := _m.Called(ctx, orgID) @@ -2533,35 +848,6 @@ func (_m *Store) GetOrganizationByID(ctx context.Context, orgID string) (params. return r0, r1 } -// Store_GetOrganizationByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrganizationByID' -type Store_GetOrganizationByID_Call struct { - *mock.Call -} - -// GetOrganizationByID is a helper method to define mock.On call -// - ctx context.Context -// - orgID string -func (_e *Store_Expecter) GetOrganizationByID(ctx interface{}, orgID interface{}) *Store_GetOrganizationByID_Call { - return &Store_GetOrganizationByID_Call{Call: _e.mock.On("GetOrganizationByID", ctx, orgID)} -} - -func (_c *Store_GetOrganizationByID_Call) Run(run func(ctx context.Context, orgID string)) *Store_GetOrganizationByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetOrganizationByID_Call) Return(_a0 params.Organization, _a1 error) *Store_GetOrganizationByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetOrganizationByID_Call) RunAndReturn(run func(context.Context, string) (params.Organization, error)) *Store_GetOrganizationByID_Call { - _c.Call.Return(run) - return _c -} - // GetPoolByID provides a mock function with given fields: ctx, poolID func (_m *Store) GetPoolByID(ctx context.Context, poolID string) (params.Pool, error) { ret := _m.Called(ctx, poolID) @@ -2590,33 +876,32 @@ func (_m *Store) GetPoolByID(ctx context.Context, poolID string) (params.Pool, e return r0, r1 } -// Store_GetPoolByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetPoolByID' -type Store_GetPoolByID_Call struct { - *mock.Call -} +// GetPoolInstanceByName provides a mock function with given fields: ctx, poolID, instanceName +func (_m *Store) GetPoolInstanceByName(ctx context.Context, poolID string, instanceName string) (params.Instance, error) { + ret := _m.Called(ctx, poolID, instanceName) -// GetPoolByID is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -func 
(_e *Store_Expecter) GetPoolByID(ctx interface{}, poolID interface{}) *Store_GetPoolByID_Call { - return &Store_GetPoolByID_Call{Call: _e.mock.On("GetPoolByID", ctx, poolID)} -} + if len(ret) == 0 { + panic("no return value specified for GetPoolInstanceByName") + } -func (_c *Store_GetPoolByID_Call) Run(run func(ctx context.Context, poolID string)) *Store_GetPoolByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} + var r0 params.Instance + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (params.Instance, error)); ok { + return rf(ctx, poolID, instanceName) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) params.Instance); ok { + r0 = rf(ctx, poolID, instanceName) + } else { + r0 = ret.Get(0).(params.Instance) + } -func (_c *Store_GetPoolByID_Call) Return(_a0 params.Pool, _a1 error) *Store_GetPoolByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, poolID, instanceName) + } else { + r1 = ret.Error(1) + } -func (_c *Store_GetPoolByID_Call) RunAndReturn(run func(context.Context, string) (params.Pool, error)) *Store_GetPoolByID_Call { - _c.Call.Return(run) - return _c + return r0, r1 } // GetRepository provides a mock function with given fields: ctx, owner, name, endpointName @@ -2647,37 +932,6 @@ func (_m *Store) GetRepository(ctx context.Context, owner string, name string, e return r0, r1 } -// Store_GetRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepository' -type Store_GetRepository_Call struct { - *mock.Call -} - -// GetRepository is a helper method to define mock.On call -// - ctx context.Context -// - owner string -// - name string -// - endpointName string -func (_e *Store_Expecter) GetRepository(ctx interface{}, owner interface{}, name interface{}, endpointName interface{}) *Store_GetRepository_Call { - return &Store_GetRepository_Call{Call: _e.mock.On("GetRepository", ctx, owner, name, endpointName)} -} - -func (_c *Store_GetRepository_Call) Run(run func(ctx context.Context, owner string, name string, endpointName string)) *Store_GetRepository_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(string)) - }) - return _c -} - -func (_c *Store_GetRepository_Call) Return(_a0 params.Repository, _a1 error) *Store_GetRepository_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetRepository_Call) RunAndReturn(run func(context.Context, string, string, string) (params.Repository, error)) *Store_GetRepository_Call { - _c.Call.Return(run) - return _c -} - // GetRepositoryByID provides a mock function with given fields: ctx, repoID func (_m *Store) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) { ret := _m.Called(ctx, repoID) @@ -2706,92 +960,6 @@ func (_m *Store) GetRepositoryByID(ctx context.Context, repoID string) (params.R return r0, r1 } -// Store_GetRepositoryByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepositoryByID' -type Store_GetRepositoryByID_Call struct { - *mock.Call -} - -// GetRepositoryByID is a helper method to define mock.On call -// - ctx context.Context -// - repoID string -func (_e *Store_Expecter) GetRepositoryByID(ctx interface{}, repoID interface{}) *Store_GetRepositoryByID_Call { - return 
&Store_GetRepositoryByID_Call{Call: _e.mock.On("GetRepositoryByID", ctx, repoID)} -} - -func (_c *Store_GetRepositoryByID_Call) Run(run func(ctx context.Context, repoID string)) *Store_GetRepositoryByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetRepositoryByID_Call) Return(_a0 params.Repository, _a1 error) *Store_GetRepositoryByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetRepositoryByID_Call) RunAndReturn(run func(context.Context, string) (params.Repository, error)) *Store_GetRepositoryByID_Call { - _c.Call.Return(run) - return _c -} - -// GetScaleSetByID provides a mock function with given fields: ctx, scaleSet -func (_m *Store) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) { - ret := _m.Called(ctx, scaleSet) - - if len(ret) == 0 { - panic("no return value specified for GetScaleSetByID") - } - - var r0 params.ScaleSet - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint) (params.ScaleSet, error)); ok { - return rf(ctx, scaleSet) - } - if rf, ok := ret.Get(0).(func(context.Context, uint) params.ScaleSet); ok { - r0 = rf(ctx, scaleSet) - } else { - r0 = ret.Get(0).(params.ScaleSet) - } - - if rf, ok := ret.Get(1).(func(context.Context, uint) error); ok { - r1 = rf(ctx, scaleSet) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_GetScaleSetByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetScaleSetByID' -type Store_GetScaleSetByID_Call struct { - *mock.Call -} - -// GetScaleSetByID is a helper method to define mock.On call -// - ctx context.Context -// - scaleSet uint -func (_e *Store_Expecter) GetScaleSetByID(ctx interface{}, scaleSet interface{}) *Store_GetScaleSetByID_Call { - return &Store_GetScaleSetByID_Call{Call: _e.mock.On("GetScaleSetByID", ctx, scaleSet)} -} - -func (_c *Store_GetScaleSetByID_Call) Run(run func(ctx context.Context, scaleSet uint)) *Store_GetScaleSetByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint)) - }) - return _c -} - -func (_c *Store_GetScaleSetByID_Call) Return(_a0 params.ScaleSet, _a1 error) *Store_GetScaleSetByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetScaleSetByID_Call) RunAndReturn(run func(context.Context, uint) (params.ScaleSet, error)) *Store_GetScaleSetByID_Call { - _c.Call.Return(run) - return _c -} - // GetUser provides a mock function with given fields: ctx, user func (_m *Store) GetUser(ctx context.Context, user string) (params.User, error) { ret := _m.Called(ctx, user) @@ -2820,35 +988,6 @@ func (_m *Store) GetUser(ctx context.Context, user string) (params.User, error) return r0, r1 } -// Store_GetUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUser' -type Store_GetUser_Call struct { - *mock.Call -} - -// GetUser is a helper method to define mock.On call -// - ctx context.Context -// - user string -func (_e *Store_Expecter) GetUser(ctx interface{}, user interface{}) *Store_GetUser_Call { - return &Store_GetUser_Call{Call: _e.mock.On("GetUser", ctx, user)} -} - -func (_c *Store_GetUser_Call) Run(run func(ctx context.Context, user string)) *Store_GetUser_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetUser_Call) Return(_a0 params.User, _a1 error) *Store_GetUser_Call { - 
_c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetUser_Call) RunAndReturn(run func(context.Context, string) (params.User, error)) *Store_GetUser_Call { - _c.Call.Return(run) - return _c -} - // GetUserByID provides a mock function with given fields: ctx, userID func (_m *Store) GetUserByID(ctx context.Context, userID string) (params.User, error) { ret := _m.Called(ctx, userID) @@ -2877,35 +1016,6 @@ func (_m *Store) GetUserByID(ctx context.Context, userID string) (params.User, e return r0, r1 } -// Store_GetUserByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserByID' -type Store_GetUserByID_Call struct { - *mock.Call -} - -// GetUserByID is a helper method to define mock.On call -// - ctx context.Context -// - userID string -func (_e *Store_Expecter) GetUserByID(ctx interface{}, userID interface{}) *Store_GetUserByID_Call { - return &Store_GetUserByID_Call{Call: _e.mock.On("GetUserByID", ctx, userID)} -} - -func (_c *Store_GetUserByID_Call) Run(run func(ctx context.Context, userID string)) *Store_GetUserByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_GetUserByID_Call) Return(_a0 params.User, _a1 error) *Store_GetUserByID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_GetUserByID_Call) RunAndReturn(run func(context.Context, string) (params.User, error)) *Store_GetUserByID_Call { - _c.Call.Return(run) - return _c -} - // HasAdminUser provides a mock function with given fields: ctx func (_m *Store) HasAdminUser(ctx context.Context) bool { ret := _m.Called(ctx) @@ -2924,34 +1034,6 @@ func (_m *Store) HasAdminUser(ctx context.Context) bool { return r0 } -// Store_HasAdminUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasAdminUser' -type Store_HasAdminUser_Call struct { - *mock.Call -} - -// HasAdminUser is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) HasAdminUser(ctx interface{}) *Store_HasAdminUser_Call { - return &Store_HasAdminUser_Call{Call: _e.mock.On("HasAdminUser", ctx)} -} - -func (_c *Store_HasAdminUser_Call) Run(run func(ctx context.Context)) *Store_HasAdminUser_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_HasAdminUser_Call) Return(_a0 bool) *Store_HasAdminUser_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_HasAdminUser_Call) RunAndReturn(run func(context.Context) bool) *Store_HasAdminUser_Call { - _c.Call.Return(run) - return _c -} - // InitController provides a mock function with no fields func (_m *Store) InitController() (params.ControllerInfo, error) { ret := _m.Called() @@ -2980,33 +1062,6 @@ func (_m *Store) InitController() (params.ControllerInfo, error) { return r0, r1 } -// Store_InitController_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InitController' -type Store_InitController_Call struct { - *mock.Call -} - -// InitController is a helper method to define mock.On call -func (_e *Store_Expecter) InitController() *Store_InitController_Call { - return &Store_InitController_Call{Call: _e.mock.On("InitController")} -} - -func (_c *Store_InitController_Call) Run(run func()) *Store_InitController_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *Store_InitController_Call) Return(_a0 params.ControllerInfo, _a1 error) 
*Store_InitController_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_InitController_Call) RunAndReturn(run func() (params.ControllerInfo, error)) *Store_InitController_Call { - _c.Call.Return(run) - return _c -} - // ListAllInstances provides a mock function with given fields: ctx func (_m *Store) ListAllInstances(ctx context.Context) ([]params.Instance, error) { ret := _m.Called(ctx) @@ -3037,34 +1092,6 @@ func (_m *Store) ListAllInstances(ctx context.Context) ([]params.Instance, error return r0, r1 } -// Store_ListAllInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllInstances' -type Store_ListAllInstances_Call struct { - *mock.Call -} - -// ListAllInstances is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListAllInstances(ctx interface{}) *Store_ListAllInstances_Call { - return &Store_ListAllInstances_Call{Call: _e.mock.On("ListAllInstances", ctx)} -} - -func (_c *Store_ListAllInstances_Call) Run(run func(ctx context.Context)) *Store_ListAllInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListAllInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListAllInstances_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListAllInstances_Call) RunAndReturn(run func(context.Context) ([]params.Instance, error)) *Store_ListAllInstances_Call { - _c.Call.Return(run) - return _c -} - // ListAllJobs provides a mock function with given fields: ctx func (_m *Store) ListAllJobs(ctx context.Context) ([]params.Job, error) { ret := _m.Called(ctx) @@ -3095,34 +1122,6 @@ func (_m *Store) ListAllJobs(ctx context.Context) ([]params.Job, error) { return r0, r1 } -// Store_ListAllJobs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllJobs' -type Store_ListAllJobs_Call struct { - *mock.Call -} - -// ListAllJobs is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListAllJobs(ctx interface{}) *Store_ListAllJobs_Call { - return &Store_ListAllJobs_Call{Call: _e.mock.On("ListAllJobs", ctx)} -} - -func (_c *Store_ListAllJobs_Call) Run(run func(ctx context.Context)) *Store_ListAllJobs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListAllJobs_Call) Return(_a0 []params.Job, _a1 error) *Store_ListAllJobs_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListAllJobs_Call) RunAndReturn(run func(context.Context) ([]params.Job, error)) *Store_ListAllJobs_Call { - _c.Call.Return(run) - return _c -} - // ListAllPools provides a mock function with given fields: ctx func (_m *Store) ListAllPools(ctx context.Context) ([]params.Pool, error) { ret := _m.Called(ctx) @@ -3153,52 +1152,24 @@ func (_m *Store) ListAllPools(ctx context.Context) ([]params.Pool, error) { return r0, r1 } -// Store_ListAllPools_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllPools' -type Store_ListAllPools_Call struct { - *mock.Call -} - -// ListAllPools is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListAllPools(ctx interface{}) *Store_ListAllPools_Call { - return &Store_ListAllPools_Call{Call: _e.mock.On("ListAllPools", ctx)} -} - -func (_c *Store_ListAllPools_Call) Run(run func(ctx context.Context)) *Store_ListAllPools_Call { - 
_c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListAllPools_Call) Return(_a0 []params.Pool, _a1 error) *Store_ListAllPools_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListAllPools_Call) RunAndReturn(run func(context.Context) ([]params.Pool, error)) *Store_ListAllPools_Call { - _c.Call.Return(run) - return _c -} - -// ListAllScaleSets provides a mock function with given fields: ctx -func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) { +// ListEnterprises provides a mock function with given fields: ctx +func (_m *Store) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) { ret := _m.Called(ctx) if len(ret) == 0 { - panic("no return value specified for ListAllScaleSets") + panic("no return value specified for ListEnterprises") } - var r0 []params.ScaleSet + var r0 []params.Enterprise var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.ScaleSet, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) ([]params.Enterprise, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) []params.ScaleSet); ok { + if rf, ok := ret.Get(0).(func(context.Context) []params.Enterprise); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ScaleSet) + r0 = ret.Get(0).([]params.Enterprise) } } @@ -3211,95 +1182,8 @@ func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error return r0, r1 } -// Store_ListAllScaleSets_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllScaleSets' -type Store_ListAllScaleSets_Call struct { - *mock.Call -} - -// ListAllScaleSets is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListAllScaleSets(ctx interface{}) *Store_ListAllScaleSets_Call { - return &Store_ListAllScaleSets_Call{Call: _e.mock.On("ListAllScaleSets", ctx)} -} - -func (_c *Store_ListAllScaleSets_Call) Run(run func(ctx context.Context)) *Store_ListAllScaleSets_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListAllScaleSets_Call) Return(_a0 []params.ScaleSet, _a1 error) *Store_ListAllScaleSets_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListAllScaleSets_Call) RunAndReturn(run func(context.Context) ([]params.ScaleSet, error)) *Store_ListAllScaleSets_Call { - _c.Call.Return(run) - return _c -} - -// ListEnterprises provides a mock function with given fields: ctx, filter -func (_m *Store) ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { - ret := _m.Called(ctx, filter) - - if len(ret) == 0 { - panic("no return value specified for ListEnterprises") - } - - var r0 []params.Enterprise - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.EnterpriseFilter) ([]params.Enterprise, error)); ok { - return rf(ctx, filter) - } - if rf, ok := ret.Get(0).(func(context.Context, params.EnterpriseFilter) []params.Enterprise); ok { - r0 = rf(ctx, filter) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.Enterprise) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, params.EnterpriseFilter) error); ok { - r1 = rf(ctx, filter) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_ListEnterprises_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEnterprises' -type 
Store_ListEnterprises_Call struct { - *mock.Call -} - -// ListEnterprises is a helper method to define mock.On call -// - ctx context.Context -// - filter params.EnterpriseFilter -func (_e *Store_Expecter) ListEnterprises(ctx interface{}, filter interface{}) *Store_ListEnterprises_Call { - return &Store_ListEnterprises_Call{Call: _e.mock.On("ListEnterprises", ctx, filter)} -} - -func (_c *Store_ListEnterprises_Call) Run(run func(ctx context.Context, filter params.EnterpriseFilter)) *Store_ListEnterprises_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.EnterpriseFilter)) - }) - return _c -} - -func (_c *Store_ListEnterprises_Call) Return(_a0 []params.Enterprise, _a1 error) *Store_ListEnterprises_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListEnterprises_Call) RunAndReturn(run func(context.Context, params.EnterpriseFilter) ([]params.Enterprise, error)) *Store_ListEnterprises_Call { - _c.Call.Return(run) - return _c -} - // ListEntityInstances provides a mock function with given fields: ctx, entity -func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEntity) ([]params.Instance, error) { +func (_m *Store) ListEntityInstances(ctx context.Context, entity params.GithubEntity) ([]params.Instance, error) { ret := _m.Called(ctx, entity) if len(ret) == 0 { @@ -3308,10 +1192,10 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEnt var r0 []params.Instance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.Instance, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.Instance, error)); ok { return rf(ctx, entity) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.Instance); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.Instance); ok { r0 = rf(ctx, entity) } else { if ret.Get(0) != nil { @@ -3319,7 +1203,7 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEnt } } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { r1 = rf(ctx, entity) } else { r1 = ret.Error(1) @@ -3328,37 +1212,8 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEnt return r0, r1 } -// Store_ListEntityInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityInstances' -type Store_ListEntityInstances_Call struct { - *mock.Call -} - -// ListEntityInstances is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -func (_e *Store_Expecter) ListEntityInstances(ctx interface{}, entity interface{}) *Store_ListEntityInstances_Call { - return &Store_ListEntityInstances_Call{Call: _e.mock.On("ListEntityInstances", ctx, entity)} -} - -func (_c *Store_ListEntityInstances_Call) Run(run func(ctx context.Context, entity params.ForgeEntity)) *Store_ListEntityInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity)) - }) - return _c -} - -func (_c *Store_ListEntityInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListEntityInstances_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListEntityInstances_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.Instance, error)) 
*Store_ListEntityInstances_Call { - _c.Call.Return(run) - return _c -} - // ListEntityJobsByStatus provides a mock function with given fields: ctx, entityType, entityID, status -func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { +func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { ret := _m.Called(ctx, entityType, entityID, status) if len(ret) == 0 { @@ -3367,10 +1222,10 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.F var r0 []params.Job var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) ([]params.Job, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, params.JobStatus) ([]params.Job, error)); ok { return rf(ctx, entityType, entityID, status) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) []params.Job); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, params.JobStatus) []params.Job); ok { r0 = rf(ctx, entityType, entityID, status) } else { if ret.Get(0) != nil { @@ -3378,7 +1233,7 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.F } } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string, params.JobStatus) error); ok { r1 = rf(ctx, entityType, entityID, status) } else { r1 = ret.Error(1) @@ -3387,39 +1242,8 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.F return r0, r1 } -// Store_ListEntityJobsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityJobsByStatus' -type Store_ListEntityJobsByStatus_Call struct { - *mock.Call -} - -// ListEntityJobsByStatus is a helper method to define mock.On call -// - ctx context.Context -// - entityType params.ForgeEntityType -// - entityID string -// - status params.JobStatus -func (_e *Store_Expecter) ListEntityJobsByStatus(ctx interface{}, entityType interface{}, entityID interface{}, status interface{}) *Store_ListEntityJobsByStatus_Call { - return &Store_ListEntityJobsByStatus_Call{Call: _e.mock.On("ListEntityJobsByStatus", ctx, entityType, entityID, status)} -} - -func (_c *Store_ListEntityJobsByStatus_Call) Run(run func(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus)) *Store_ListEntityJobsByStatus_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string), args[3].(params.JobStatus)) - }) - return _c -} - -func (_c *Store_ListEntityJobsByStatus_Call) Return(_a0 []params.Job, _a1 error) *Store_ListEntityJobsByStatus_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListEntityJobsByStatus_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string, params.JobStatus) ([]params.Job, error)) *Store_ListEntityJobsByStatus_Call { - _c.Call.Return(run) - return _c -} - // ListEntityPools provides a mock function with given fields: ctx, entity -func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) ([]params.Pool, error) { +func (_m *Store) ListEntityPools(ctx 
context.Context, entity params.GithubEntity) ([]params.Pool, error) { ret := _m.Called(ctx, entity) if len(ret) == 0 { @@ -3428,10 +1252,10 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) var r0 []params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.Pool, error)); ok { return rf(ctx, entity) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.Pool); ok { r0 = rf(ctx, entity) } else { if ret.Get(0) != nil { @@ -3439,7 +1263,7 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) } } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { r1 = rf(ctx, entity) } else { r1 = ret.Error(1) @@ -3448,228 +1272,24 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) return r0, r1 } -// Store_ListEntityPools_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityPools' -type Store_ListEntityPools_Call struct { - *mock.Call -} - -// ListEntityPools is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -func (_e *Store_Expecter) ListEntityPools(ctx interface{}, entity interface{}) *Store_ListEntityPools_Call { - return &Store_ListEntityPools_Call{Call: _e.mock.On("ListEntityPools", ctx, entity)} -} - -func (_c *Store_ListEntityPools_Call) Run(run func(ctx context.Context, entity params.ForgeEntity)) *Store_ListEntityPools_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity)) - }) - return _c -} - -func (_c *Store_ListEntityPools_Call) Return(_a0 []params.Pool, _a1 error) *Store_ListEntityPools_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListEntityPools_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.Pool, error)) *Store_ListEntityPools_Call { - _c.Call.Return(run) - return _c -} - -// ListEntityScaleSets provides a mock function with given fields: _a0, entity -func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { - ret := _m.Called(_a0, entity) - - if len(ret) == 0 { - panic("no return value specified for ListEntityScaleSets") - } - - var r0 []params.ScaleSet - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.ScaleSet, error)); ok { - return rf(_a0, entity) - } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.ScaleSet); ok { - r0 = rf(_a0, entity) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ScaleSet) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { - r1 = rf(_a0, entity) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_ListEntityScaleSets_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityScaleSets' -type Store_ListEntityScaleSets_Call struct { - *mock.Call -} - -// ListEntityScaleSets is a helper method to define mock.On call -// - _a0 context.Context -// - entity params.ForgeEntity -func (_e *Store_Expecter) ListEntityScaleSets(_a0 interface{}, entity interface{}) 
*Store_ListEntityScaleSets_Call { - return &Store_ListEntityScaleSets_Call{Call: _e.mock.On("ListEntityScaleSets", _a0, entity)} -} - -func (_c *Store_ListEntityScaleSets_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity)) *Store_ListEntityScaleSets_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity)) - }) - return _c -} - -func (_c *Store_ListEntityScaleSets_Call) Return(_a0 []params.ScaleSet, _a1 error) *Store_ListEntityScaleSets_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListEntityScaleSets_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.ScaleSet, error)) *Store_ListEntityScaleSets_Call { - _c.Call.Return(run) - return _c -} - -// ListGiteaCredentials provides a mock function with given fields: ctx -func (_m *Store) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for ListGiteaCredentials") - } - - var r0 []params.ForgeCredentials - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeCredentials, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeCredentials); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ForgeCredentials) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_ListGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGiteaCredentials' -type Store_ListGiteaCredentials_Call struct { - *mock.Call -} - -// ListGiteaCredentials is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListGiteaCredentials(ctx interface{}) *Store_ListGiteaCredentials_Call { - return &Store_ListGiteaCredentials_Call{Call: _e.mock.On("ListGiteaCredentials", ctx)} -} - -func (_c *Store_ListGiteaCredentials_Call) Run(run func(ctx context.Context)) *Store_ListGiteaCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListGiteaCredentials_Call) Return(_a0 []params.ForgeCredentials, _a1 error) *Store_ListGiteaCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListGiteaCredentials_Call) RunAndReturn(run func(context.Context) ([]params.ForgeCredentials, error)) *Store_ListGiteaCredentials_Call { - _c.Call.Return(run) - return _c -} - -// ListGiteaEndpoints provides a mock function with given fields: _a0 -func (_m *Store) ListGiteaEndpoints(_a0 context.Context) ([]params.ForgeEndpoint, error) { - ret := _m.Called(_a0) - - if len(ret) == 0 { - panic("no return value specified for ListGiteaEndpoints") - } - - var r0 []params.ForgeEndpoint - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeEndpoint, error)); ok { - return rf(_a0) - } - if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeEndpoint); ok { - r0 = rf(_a0) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ForgeEndpoint) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(_a0) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_ListGiteaEndpoints_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGiteaEndpoints' -type Store_ListGiteaEndpoints_Call struct { - 
*mock.Call -} - -// ListGiteaEndpoints is a helper method to define mock.On call -// - _a0 context.Context -func (_e *Store_Expecter) ListGiteaEndpoints(_a0 interface{}) *Store_ListGiteaEndpoints_Call { - return &Store_ListGiteaEndpoints_Call{Call: _e.mock.On("ListGiteaEndpoints", _a0)} -} - -func (_c *Store_ListGiteaEndpoints_Call) Run(run func(_a0 context.Context)) *Store_ListGiteaEndpoints_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListGiteaEndpoints_Call) Return(_a0 []params.ForgeEndpoint, _a1 error) *Store_ListGiteaEndpoints_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListGiteaEndpoints_Call) RunAndReturn(run func(context.Context) ([]params.ForgeEndpoint, error)) *Store_ListGiteaEndpoints_Call { - _c.Call.Return(run) - return _c -} - // ListGithubCredentials provides a mock function with given fields: ctx -func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { +func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) { ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListGithubCredentials") } - var r0 []params.ForgeCredentials + var r0 []params.GithubCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) ([]params.GithubCredentials, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context) []params.GithubCredentials); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ForgeCredentials) + r0 = ret.Get(0).([]params.GithubCredentials) } } @@ -3682,52 +1302,24 @@ func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.ForgeCrede return r0, r1 } -// Store_ListGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGithubCredentials' -type Store_ListGithubCredentials_Call struct { - *mock.Call -} - -// ListGithubCredentials is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListGithubCredentials(ctx interface{}) *Store_ListGithubCredentials_Call { - return &Store_ListGithubCredentials_Call{Call: _e.mock.On("ListGithubCredentials", ctx)} -} - -func (_c *Store_ListGithubCredentials_Call) Run(run func(ctx context.Context)) *Store_ListGithubCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListGithubCredentials_Call) Return(_a0 []params.ForgeCredentials, _a1 error) *Store_ListGithubCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListGithubCredentials_Call) RunAndReturn(run func(context.Context) ([]params.ForgeCredentials, error)) *Store_ListGithubCredentials_Call { - _c.Call.Return(run) - return _c -} - // ListGithubEndpoints provides a mock function with given fields: ctx -func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { +func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) { ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListGithubEndpoints") } - var r0 []params.ForgeEndpoint + var r0 []params.GithubEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeEndpoint, error)); ok { + if 
rf, ok := ret.Get(0).(func(context.Context) ([]params.GithubEndpoint, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context) []params.GithubEndpoint); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.ForgeEndpoint) + r0 = ret.Get(0).([]params.GithubEndpoint) } } @@ -3740,34 +1332,6 @@ func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoin return r0, r1 } -// Store_ListGithubEndpoints_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGithubEndpoints' -type Store_ListGithubEndpoints_Call struct { - *mock.Call -} - -// ListGithubEndpoints is a helper method to define mock.On call -// - ctx context.Context -func (_e *Store_Expecter) ListGithubEndpoints(ctx interface{}) *Store_ListGithubEndpoints_Call { - return &Store_ListGithubEndpoints_Call{Call: _e.mock.On("ListGithubEndpoints", ctx)} -} - -func (_c *Store_ListGithubEndpoints_Call) Run(run func(ctx context.Context)) *Store_ListGithubEndpoints_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *Store_ListGithubEndpoints_Call) Return(_a0 []params.ForgeEndpoint, _a1 error) *Store_ListGithubEndpoints_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListGithubEndpoints_Call) RunAndReturn(run func(context.Context) ([]params.ForgeEndpoint, error)) *Store_ListGithubEndpoints_Call { - _c.Call.Return(run) - return _c -} - // ListJobsByStatus provides a mock function with given fields: ctx, status func (_m *Store) ListJobsByStatus(ctx context.Context, status params.JobStatus) ([]params.Job, error) { ret := _m.Called(ctx, status) @@ -3798,38 +1362,9 @@ func (_m *Store) ListJobsByStatus(ctx context.Context, status params.JobStatus) return r0, r1 } -// Store_ListJobsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListJobsByStatus' -type Store_ListJobsByStatus_Call struct { - *mock.Call -} - -// ListJobsByStatus is a helper method to define mock.On call -// - ctx context.Context -// - status params.JobStatus -func (_e *Store_Expecter) ListJobsByStatus(ctx interface{}, status interface{}) *Store_ListJobsByStatus_Call { - return &Store_ListJobsByStatus_Call{Call: _e.mock.On("ListJobsByStatus", ctx, status)} -} - -func (_c *Store_ListJobsByStatus_Call) Run(run func(ctx context.Context, status params.JobStatus)) *Store_ListJobsByStatus_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.JobStatus)) - }) - return _c -} - -func (_c *Store_ListJobsByStatus_Call) Return(_a0 []params.Job, _a1 error) *Store_ListJobsByStatus_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListJobsByStatus_Call) RunAndReturn(run func(context.Context, params.JobStatus) ([]params.Job, error)) *Store_ListJobsByStatus_Call { - _c.Call.Return(run) - return _c -} - -// ListOrganizations provides a mock function with given fields: ctx, filter -func (_m *Store) ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { - ret := _m.Called(ctx, filter) +// ListOrganizations provides a mock function with given fields: ctx +func (_m *Store) ListOrganizations(ctx context.Context) ([]params.Organization, error) { + ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListOrganizations") @@ -3837,19 +1372,19 @@ func (_m *Store) 
ListOrganizations(ctx context.Context, filter params.Organizati var r0 []params.Organization var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.OrganizationFilter) ([]params.Organization, error)); ok { - return rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context) ([]params.Organization, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context, params.OrganizationFilter) []params.Organization); ok { - r0 = rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context) []params.Organization); ok { + r0 = rf(ctx) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]params.Organization) } } - if rf, ok := ret.Get(1).(func(context.Context, params.OrganizationFilter) error); ok { - r1 = rf(ctx, filter) + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -3857,35 +1392,6 @@ func (_m *Store) ListOrganizations(ctx context.Context, filter params.Organizati return r0, r1 } -// Store_ListOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrganizations' -type Store_ListOrganizations_Call struct { - *mock.Call -} - -// ListOrganizations is a helper method to define mock.On call -// - ctx context.Context -// - filter params.OrganizationFilter -func (_e *Store_Expecter) ListOrganizations(ctx interface{}, filter interface{}) *Store_ListOrganizations_Call { - return &Store_ListOrganizations_Call{Call: _e.mock.On("ListOrganizations", ctx, filter)} -} - -func (_c *Store_ListOrganizations_Call) Run(run func(ctx context.Context, filter params.OrganizationFilter)) *Store_ListOrganizations_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.OrganizationFilter)) - }) - return _c -} - -func (_c *Store_ListOrganizations_Call) Return(_a0 []params.Organization, _a1 error) *Store_ListOrganizations_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListOrganizations_Call) RunAndReturn(run func(context.Context, params.OrganizationFilter) ([]params.Organization, error)) *Store_ListOrganizations_Call { - _c.Call.Return(run) - return _c -} - // ListPoolInstances provides a mock function with given fields: ctx, poolID func (_m *Store) ListPoolInstances(ctx context.Context, poolID string) ([]params.Instance, error) { ret := _m.Called(ctx, poolID) @@ -3916,38 +1422,9 @@ func (_m *Store) ListPoolInstances(ctx context.Context, poolID string) ([]params return r0, r1 } -// Store_ListPoolInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListPoolInstances' -type Store_ListPoolInstances_Call struct { - *mock.Call -} - -// ListPoolInstances is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -func (_e *Store_Expecter) ListPoolInstances(ctx interface{}, poolID interface{}) *Store_ListPoolInstances_Call { - return &Store_ListPoolInstances_Call{Call: _e.mock.On("ListPoolInstances", ctx, poolID)} -} - -func (_c *Store_ListPoolInstances_Call) Run(run func(ctx context.Context, poolID string)) *Store_ListPoolInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *Store_ListPoolInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListPoolInstances_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListPoolInstances_Call) RunAndReturn(run func(context.Context, string) ([]params.Instance, error)) 
*Store_ListPoolInstances_Call { - _c.Call.Return(run) - return _c -} - -// ListRepositories provides a mock function with given fields: ctx, filter -func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { - ret := _m.Called(ctx, filter) +// ListRepositories provides a mock function with given fields: ctx +func (_m *Store) ListRepositories(ctx context.Context) ([]params.Repository, error) { + ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListRepositories") @@ -3955,19 +1432,19 @@ func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryF var r0 []params.Repository var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.RepositoryFilter) ([]params.Repository, error)); ok { - return rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context) ([]params.Repository, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context, params.RepositoryFilter) []params.Repository); ok { - r0 = rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context) []params.Repository); ok { + r0 = rf(ctx) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]params.Repository) } } - if rf, ok := ret.Get(1).(func(context.Context, params.RepositoryFilter) error); ok { - r1 = rf(ctx, filter) + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -3975,94 +1452,6 @@ func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryF return r0, r1 } -// Store_ListRepositories_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListRepositories' -type Store_ListRepositories_Call struct { - *mock.Call -} - -// ListRepositories is a helper method to define mock.On call -// - ctx context.Context -// - filter params.RepositoryFilter -func (_e *Store_Expecter) ListRepositories(ctx interface{}, filter interface{}) *Store_ListRepositories_Call { - return &Store_ListRepositories_Call{Call: _e.mock.On("ListRepositories", ctx, filter)} -} - -func (_c *Store_ListRepositories_Call) Run(run func(ctx context.Context, filter params.RepositoryFilter)) *Store_ListRepositories_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.RepositoryFilter)) - }) - return _c -} - -func (_c *Store_ListRepositories_Call) Return(_a0 []params.Repository, _a1 error) *Store_ListRepositories_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListRepositories_Call) RunAndReturn(run func(context.Context, params.RepositoryFilter) ([]params.Repository, error)) *Store_ListRepositories_Call { - _c.Call.Return(run) - return _c -} - -// ListScaleSetInstances provides a mock function with given fields: _a0, scalesetID -func (_m *Store) ListScaleSetInstances(_a0 context.Context, scalesetID uint) ([]params.Instance, error) { - ret := _m.Called(_a0, scalesetID) - - if len(ret) == 0 { - panic("no return value specified for ListScaleSetInstances") - } - - var r0 []params.Instance - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint) ([]params.Instance, error)); ok { - return rf(_a0, scalesetID) - } - if rf, ok := ret.Get(0).(func(context.Context, uint) []params.Instance); ok { - r0 = rf(_a0, scalesetID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.Instance) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint) error); ok { - r1 = rf(_a0, scalesetID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - 
-// Store_ListScaleSetInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListScaleSetInstances' -type Store_ListScaleSetInstances_Call struct { - *mock.Call -} - -// ListScaleSetInstances is a helper method to define mock.On call -// - _a0 context.Context -// - scalesetID uint -func (_e *Store_Expecter) ListScaleSetInstances(_a0 interface{}, scalesetID interface{}) *Store_ListScaleSetInstances_Call { - return &Store_ListScaleSetInstances_Call{Call: _e.mock.On("ListScaleSetInstances", _a0, scalesetID)} -} - -func (_c *Store_ListScaleSetInstances_Call) Run(run func(_a0 context.Context, scalesetID uint)) *Store_ListScaleSetInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint)) - }) - return _c -} - -func (_c *Store_ListScaleSetInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListScaleSetInstances_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_ListScaleSetInstances_Call) RunAndReturn(run func(context.Context, uint) ([]params.Instance, error)) *Store_ListScaleSetInstances_Call { - _c.Call.Return(run) - return _c -} - // LockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) LockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -4081,36 +1470,6 @@ func (_m *Store) LockJob(ctx context.Context, jobID int64, entityID string) erro return r0 } -// Store_LockJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LockJob' -type Store_LockJob_Call struct { - *mock.Call -} - -// LockJob is a helper method to define mock.On call -// - ctx context.Context -// - jobID int64 -// - entityID string -func (_e *Store_Expecter) LockJob(ctx interface{}, jobID interface{}, entityID interface{}) *Store_LockJob_Call { - return &Store_LockJob_Call{Call: _e.mock.On("LockJob", ctx, jobID, entityID)} -} - -func (_c *Store_LockJob_Call) Run(run func(ctx context.Context, jobID int64, entityID string)) *Store_LockJob_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64), args[2].(string)) - }) - return _c -} - -func (_c *Store_LockJob_Call) Return(_a0 error) *Store_LockJob_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_LockJob_Call) RunAndReturn(run func(context.Context, int64, string) error) *Store_LockJob_Call { - _c.Call.Return(run) - return _c -} - // PoolInstanceCount provides a mock function with given fields: ctx, poolID func (_m *Store) PoolInstanceCount(ctx context.Context, poolID string) (int64, error) { ret := _m.Called(ctx, poolID) @@ -4139,131 +1498,6 @@ func (_m *Store) PoolInstanceCount(ctx context.Context, poolID string) (int64, e return r0, r1 } -// Store_PoolInstanceCount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PoolInstanceCount' -type Store_PoolInstanceCount_Call struct { - *mock.Call -} - -// PoolInstanceCount is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -func (_e *Store_Expecter) PoolInstanceCount(ctx interface{}, poolID interface{}) *Store_PoolInstanceCount_Call { - return &Store_PoolInstanceCount_Call{Call: _e.mock.On("PoolInstanceCount", ctx, poolID)} -} - -func (_c *Store_PoolInstanceCount_Call) Run(run func(ctx context.Context, poolID string)) *Store_PoolInstanceCount_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return 
_c -} - -func (_c *Store_PoolInstanceCount_Call) Return(_a0 int64, _a1 error) *Store_PoolInstanceCount_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_PoolInstanceCount_Call) RunAndReturn(run func(context.Context, string) (int64, error)) *Store_PoolInstanceCount_Call { - _c.Call.Return(run) - return _c -} - -// SetScaleSetDesiredRunnerCount provides a mock function with given fields: ctx, scaleSetID, desiredRunnerCount -func (_m *Store) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error { - ret := _m.Called(ctx, scaleSetID, desiredRunnerCount) - - if len(ret) == 0 { - panic("no return value specified for SetScaleSetDesiredRunnerCount") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint, int) error); ok { - r0 = rf(ctx, scaleSetID, desiredRunnerCount) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_SetScaleSetDesiredRunnerCount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetScaleSetDesiredRunnerCount' -type Store_SetScaleSetDesiredRunnerCount_Call struct { - *mock.Call -} - -// SetScaleSetDesiredRunnerCount is a helper method to define mock.On call -// - ctx context.Context -// - scaleSetID uint -// - desiredRunnerCount int -func (_e *Store_Expecter) SetScaleSetDesiredRunnerCount(ctx interface{}, scaleSetID interface{}, desiredRunnerCount interface{}) *Store_SetScaleSetDesiredRunnerCount_Call { - return &Store_SetScaleSetDesiredRunnerCount_Call{Call: _e.mock.On("SetScaleSetDesiredRunnerCount", ctx, scaleSetID, desiredRunnerCount)} -} - -func (_c *Store_SetScaleSetDesiredRunnerCount_Call) Run(run func(ctx context.Context, scaleSetID uint, desiredRunnerCount int)) *Store_SetScaleSetDesiredRunnerCount_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(int)) - }) - return _c -} - -func (_c *Store_SetScaleSetDesiredRunnerCount_Call) Return(_a0 error) *Store_SetScaleSetDesiredRunnerCount_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_SetScaleSetDesiredRunnerCount_Call) RunAndReturn(run func(context.Context, uint, int) error) *Store_SetScaleSetDesiredRunnerCount_Call { - _c.Call.Return(run) - return _c -} - -// SetScaleSetLastMessageID provides a mock function with given fields: ctx, scaleSetID, lastMessageID -func (_m *Store) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error { - ret := _m.Called(ctx, scaleSetID, lastMessageID) - - if len(ret) == 0 { - panic("no return value specified for SetScaleSetLastMessageID") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, uint, int64) error); ok { - r0 = rf(ctx, scaleSetID, lastMessageID) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Store_SetScaleSetLastMessageID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetScaleSetLastMessageID' -type Store_SetScaleSetLastMessageID_Call struct { - *mock.Call -} - -// SetScaleSetLastMessageID is a helper method to define mock.On call -// - ctx context.Context -// - scaleSetID uint -// - lastMessageID int64 -func (_e *Store_Expecter) SetScaleSetLastMessageID(ctx interface{}, scaleSetID interface{}, lastMessageID interface{}) *Store_SetScaleSetLastMessageID_Call { - return &Store_SetScaleSetLastMessageID_Call{Call: _e.mock.On("SetScaleSetLastMessageID", ctx, scaleSetID, lastMessageID)} -} - -func (_c *Store_SetScaleSetLastMessageID_Call) Run(run func(ctx context.Context, 
scaleSetID uint, lastMessageID int64)) *Store_SetScaleSetLastMessageID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(int64)) - }) - return _c -} - -func (_c *Store_SetScaleSetLastMessageID_Call) Return(_a0 error) *Store_SetScaleSetLastMessageID_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_SetScaleSetLastMessageID_Call) RunAndReturn(run func(context.Context, uint, int64) error) *Store_SetScaleSetLastMessageID_Call { - _c.Call.Return(run) - return _c -} - // UnlockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) UnlockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -4282,36 +1516,6 @@ func (_m *Store) UnlockJob(ctx context.Context, jobID int64, entityID string) er return r0 } -// Store_UnlockJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnlockJob' -type Store_UnlockJob_Call struct { - *mock.Call -} - -// UnlockJob is a helper method to define mock.On call -// - ctx context.Context -// - jobID int64 -// - entityID string -func (_e *Store_Expecter) UnlockJob(ctx interface{}, jobID interface{}, entityID interface{}) *Store_UnlockJob_Call { - return &Store_UnlockJob_Call{Call: _e.mock.On("UnlockJob", ctx, jobID, entityID)} -} - -func (_c *Store_UnlockJob_Call) Run(run func(ctx context.Context, jobID int64, entityID string)) *Store_UnlockJob_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64), args[2].(string)) - }) - return _c -} - -func (_c *Store_UnlockJob_Call) Return(_a0 error) *Store_UnlockJob_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Store_UnlockJob_Call) RunAndReturn(run func(context.Context, int64, string) error) *Store_UnlockJob_Call { - _c.Call.Return(run) - return _c -} - // UpdateController provides a mock function with given fields: info func (_m *Store) UpdateController(info params.UpdateControllerParams) (params.ControllerInfo, error) { ret := _m.Called(info) @@ -4340,34 +1544,6 @@ func (_m *Store) UpdateController(info params.UpdateControllerParams) (params.Co return r0, r1 } -// Store_UpdateController_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateController' -type Store_UpdateController_Call struct { - *mock.Call -} - -// UpdateController is a helper method to define mock.On call -// - info params.UpdateControllerParams -func (_e *Store_Expecter) UpdateController(info interface{}) *Store_UpdateController_Call { - return &Store_UpdateController_Call{Call: _e.mock.On("UpdateController", info)} -} - -func (_c *Store_UpdateController_Call) Run(run func(info params.UpdateControllerParams)) *Store_UpdateController_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.UpdateControllerParams)) - }) - return _c -} - -func (_c *Store_UpdateController_Call) Return(_a0 params.ControllerInfo, _a1 error) *Store_UpdateController_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateController_Call) RunAndReturn(run func(params.UpdateControllerParams) (params.ControllerInfo, error)) *Store_UpdateController_Call { - _c.Call.Return(run) - return _c -} - // UpdateEnterprise provides a mock function with given fields: ctx, enterpriseID, param func (_m *Store) UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (params.Enterprise, error) { ret := _m.Called(ctx, enterpriseID, param) @@ -4396,38 
+1572,8 @@ func (_m *Store) UpdateEnterprise(ctx context.Context, enterpriseID string, para return r0, r1 } -// Store_UpdateEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEnterprise' -type Store_UpdateEnterprise_Call struct { - *mock.Call -} - -// UpdateEnterprise is a helper method to define mock.On call -// - ctx context.Context -// - enterpriseID string -// - param params.UpdateEntityParams -func (_e *Store_Expecter) UpdateEnterprise(ctx interface{}, enterpriseID interface{}, param interface{}) *Store_UpdateEnterprise_Call { - return &Store_UpdateEnterprise_Call{Call: _e.mock.On("UpdateEnterprise", ctx, enterpriseID, param)} -} - -func (_c *Store_UpdateEnterprise_Call) Run(run func(ctx context.Context, enterpriseID string, param params.UpdateEntityParams)) *Store_UpdateEnterprise_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) - }) - return _c -} - -func (_c *Store_UpdateEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_UpdateEnterprise_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateEnterprise_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Enterprise, error)) *Store_UpdateEnterprise_Call { - _c.Call.Return(run) - return _c -} - // UpdateEntityPool provides a mock function with given fields: ctx, entity, poolID, param -func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) { +func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID, param) if len(ret) == 0 { @@ -4436,16 +1582,16 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) (params.Pool, error)); ok { return rf(ctx, entity, poolID, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) params.Pool); ok { r0 = rf(ctx, entity, poolID, param) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) error); ok { r1 = rf(ctx, entity, poolID, param) } else { r1 = ret.Error(1) @@ -4454,230 +1600,23 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity return r0, r1 } -// Store_UpdateEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEntityPool' -type Store_UpdateEntityPool_Call struct { - *mock.Call -} - -// UpdateEntityPool is a helper method to define mock.On call -// - ctx context.Context -// - entity params.ForgeEntity -// - poolID string -// - param params.UpdatePoolParams -func (_e *Store_Expecter) UpdateEntityPool(ctx interface{}, entity interface{}, poolID interface{}, param interface{}) *Store_UpdateEntityPool_Call { - return 
&Store_UpdateEntityPool_Call{Call: _e.mock.On("UpdateEntityPool", ctx, entity, poolID, param)} -} - -func (_c *Store_UpdateEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams)) *Store_UpdateEntityPool_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string), args[3].(params.UpdatePoolParams)) - }) - return _c -} - -func (_c *Store_UpdateEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_UpdateEntityPool_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) (params.Pool, error)) *Store_UpdateEntityPool_Call { - _c.Call.Return(run) - return _c -} - -// UpdateEntityScaleSet provides a mock function with given fields: _a0, entity, scaleSetID, param, callback -func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error) { - ret := _m.Called(_a0, entity, scaleSetID, param, callback) - - if len(ret) == 0 { - panic("no return value specified for UpdateEntityScaleSet") - } - - var r0 params.ScaleSet - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)); ok { - return rf(_a0, entity, scaleSetID, param, callback) - } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) params.ScaleSet); ok { - r0 = rf(_a0, entity, scaleSetID, param, callback) - } else { - r0 = ret.Get(0).(params.ScaleSet) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) error); ok { - r1 = rf(_a0, entity, scaleSetID, param, callback) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_UpdateEntityScaleSet_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEntityScaleSet' -type Store_UpdateEntityScaleSet_Call struct { - *mock.Call -} - -// UpdateEntityScaleSet is a helper method to define mock.On call -// - _a0 context.Context -// - entity params.ForgeEntity -// - scaleSetID uint -// - param params.UpdateScaleSetParams -// - callback func(params.ScaleSet , params.ScaleSet) error -func (_e *Store_Expecter) UpdateEntityScaleSet(_a0 interface{}, entity interface{}, scaleSetID interface{}, param interface{}, callback interface{}) *Store_UpdateEntityScaleSet_Call { - return &Store_UpdateEntityScaleSet_Call{Call: _e.mock.On("UpdateEntityScaleSet", _a0, entity, scaleSetID, param, callback)} -} - -func (_c *Store_UpdateEntityScaleSet_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error)) *Store_UpdateEntityScaleSet_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(uint), args[3].(params.UpdateScaleSetParams), args[4].(func(params.ScaleSet, params.ScaleSet) error)) - }) - return _c -} - -func (_c *Store_UpdateEntityScaleSet_Call) Return(updatedScaleSet params.ScaleSet, err error) *Store_UpdateEntityScaleSet_Call { - 
_c.Call.Return(updatedScaleSet, err) - return _c -} - -func (_c *Store_UpdateEntityScaleSet_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)) *Store_UpdateEntityScaleSet_Call { - _c.Call.Return(run) - return _c -} - -// UpdateGiteaCredentials provides a mock function with given fields: ctx, id, param -func (_m *Store) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error) { - ret := _m.Called(ctx, id, param) - - if len(ret) == 0 { - panic("no return value specified for UpdateGiteaCredentials") - } - - var r0 params.ForgeCredentials - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error)); ok { - return rf(ctx, id, param) - } - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) params.ForgeCredentials); ok { - r0 = rf(ctx, id, param) - } else { - r0 = ret.Get(0).(params.ForgeCredentials) - } - - if rf, ok := ret.Get(1).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) error); ok { - r1 = rf(ctx, id, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_UpdateGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGiteaCredentials' -type Store_UpdateGiteaCredentials_Call struct { - *mock.Call -} - -// UpdateGiteaCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -// - param params.UpdateGiteaCredentialsParams -func (_e *Store_Expecter) UpdateGiteaCredentials(ctx interface{}, id interface{}, param interface{}) *Store_UpdateGiteaCredentials_Call { - return &Store_UpdateGiteaCredentials_Call{Call: _e.mock.On("UpdateGiteaCredentials", ctx, id, param)} -} - -func (_c *Store_UpdateGiteaCredentials_Call) Run(run func(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams)) *Store_UpdateGiteaCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(params.UpdateGiteaCredentialsParams)) - }) - return _c -} - -func (_c *Store_UpdateGiteaCredentials_Call) Return(gtCreds params.ForgeCredentials, err error) *Store_UpdateGiteaCredentials_Call { - _c.Call.Return(gtCreds, err) - return _c -} - -func (_c *Store_UpdateGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint, params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error)) *Store_UpdateGiteaCredentials_Call { - _c.Call.Return(run) - return _c -} - -// UpdateGiteaEndpoint provides a mock function with given fields: _a0, name, param -func (_m *Store) UpdateGiteaEndpoint(_a0 context.Context, name string, param params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error) { - ret := _m.Called(_a0, name, param) - - if len(ret) == 0 { - panic("no return value specified for UpdateGiteaEndpoint") - } - - var r0 params.ForgeEndpoint - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error)); ok { - return rf(_a0, name, param) - } - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGiteaEndpointParams) params.ForgeEndpoint); ok { - r0 = rf(_a0, name, param) - } else { - r0 = ret.Get(0).(params.ForgeEndpoint) - } - - if rf, ok := ret.Get(1).(func(context.Context, string, params.UpdateGiteaEndpointParams) error); ok { 
- r1 = rf(_a0, name, param) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Store_UpdateGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGiteaEndpoint' -type Store_UpdateGiteaEndpoint_Call struct { - *mock.Call -} - -// UpdateGiteaEndpoint is a helper method to define mock.On call -// - _a0 context.Context -// - name string -// - param params.UpdateGiteaEndpointParams -func (_e *Store_Expecter) UpdateGiteaEndpoint(_a0 interface{}, name interface{}, param interface{}) *Store_UpdateGiteaEndpoint_Call { - return &Store_UpdateGiteaEndpoint_Call{Call: _e.mock.On("UpdateGiteaEndpoint", _a0, name, param)} -} - -func (_c *Store_UpdateGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string, param params.UpdateGiteaEndpointParams)) *Store_UpdateGiteaEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateGiteaEndpointParams)) - }) - return _c -} - -func (_c *Store_UpdateGiteaEndpoint_Call) Return(ghEndpoint params.ForgeEndpoint, err error) *Store_UpdateGiteaEndpoint_Call { - _c.Call.Return(ghEndpoint, err) - return _c -} - -func (_c *Store_UpdateGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string, params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error)) *Store_UpdateGiteaEndpoint_Call { - _c.Call.Return(run) - return _c -} - // UpdateGithubCredentials provides a mock function with given fields: ctx, id, param -func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) { +func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) { ret := _m.Called(ctx, id, param) if len(ret) == 0 { panic("no return value specified for UpdateGithubCredentials") } - var r0 params.ForgeCredentials + var r0 params.GithubCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.GithubCredentials, error)); ok { return rf(ctx, id, param) } - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) params.ForgeCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) params.GithubCredentials); ok { r0 = rf(ctx, id, param) } else { - r0 = ret.Get(0).(params.ForgeCredentials) + r0 = ret.Get(0).(params.GithubCredentials) } if rf, ok := ret.Get(1).(func(context.Context, uint, params.UpdateGithubCredentialsParams) error); ok { @@ -4689,53 +1628,23 @@ func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param par return r0, r1 } -// Store_UpdateGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGithubCredentials' -type Store_UpdateGithubCredentials_Call struct { - *mock.Call -} - -// UpdateGithubCredentials is a helper method to define mock.On call -// - ctx context.Context -// - id uint -// - param params.UpdateGithubCredentialsParams -func (_e *Store_Expecter) UpdateGithubCredentials(ctx interface{}, id interface{}, param interface{}) *Store_UpdateGithubCredentials_Call { - return &Store_UpdateGithubCredentials_Call{Call: _e.mock.On("UpdateGithubCredentials", ctx, id, param)} -} - -func (_c 
*Store_UpdateGithubCredentials_Call) Run(run func(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams)) *Store_UpdateGithubCredentials_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint), args[2].(params.UpdateGithubCredentialsParams)) - }) - return _c -} - -func (_c *Store_UpdateGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_UpdateGithubCredentials_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateGithubCredentials_Call) RunAndReturn(run func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error)) *Store_UpdateGithubCredentials_Call { - _c.Call.Return(run) - return _c -} - // UpdateGithubEndpoint provides a mock function with given fields: ctx, name, param -func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) { +func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) { ret := _m.Called(ctx, name, param) if len(ret) == 0 { panic("no return value specified for UpdateGithubEndpoint") } - var r0 params.ForgeEndpoint + var r0 params.GithubEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) (params.GithubEndpoint, error)); ok { return rf(ctx, name, param) } - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) params.ForgeEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) params.GithubEndpoint); ok { r0 = rf(ctx, name, param) } else { - r0 = ret.Get(0).(params.ForgeEndpoint) + r0 = ret.Get(0).(params.GithubEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, string, params.UpdateGithubEndpointParams) error); ok { @@ -4747,39 +1656,9 @@ func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param pa return r0, r1 } -// Store_UpdateGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGithubEndpoint' -type Store_UpdateGithubEndpoint_Call struct { - *mock.Call -} - -// UpdateGithubEndpoint is a helper method to define mock.On call -// - ctx context.Context -// - name string -// - param params.UpdateGithubEndpointParams -func (_e *Store_Expecter) UpdateGithubEndpoint(ctx interface{}, name interface{}, param interface{}) *Store_UpdateGithubEndpoint_Call { - return &Store_UpdateGithubEndpoint_Call{Call: _e.mock.On("UpdateGithubEndpoint", ctx, name, param)} -} - -func (_c *Store_UpdateGithubEndpoint_Call) Run(run func(ctx context.Context, name string, param params.UpdateGithubEndpointParams)) *Store_UpdateGithubEndpoint_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateGithubEndpointParams)) - }) - return _c -} - -func (_c *Store_UpdateGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_UpdateGithubEndpoint_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateGithubEndpoint_Call) RunAndReturn(run func(context.Context, string, params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error)) *Store_UpdateGithubEndpoint_Call { - _c.Call.Return(run) - return _c -} - -// UpdateInstance provides a mock 
function with given fields: ctx, instanceNameOrID, param -func (_m *Store) UpdateInstance(ctx context.Context, instanceNameOrID string, param params.UpdateInstanceParams) (params.Instance, error) { - ret := _m.Called(ctx, instanceNameOrID, param) +// UpdateInstance provides a mock function with given fields: ctx, instanceName, param +func (_m *Store) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { + ret := _m.Called(ctx, instanceName, param) if len(ret) == 0 { panic("no return value specified for UpdateInstance") @@ -4788,16 +1667,16 @@ func (_m *Store) UpdateInstance(ctx context.Context, instanceNameOrID string, pa var r0 params.Instance var r1 error if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateInstanceParams) (params.Instance, error)); ok { - return rf(ctx, instanceNameOrID, param) + return rf(ctx, instanceName, param) } if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateInstanceParams) params.Instance); ok { - r0 = rf(ctx, instanceNameOrID, param) + r0 = rf(ctx, instanceName, param) } else { r0 = ret.Get(0).(params.Instance) } if rf, ok := ret.Get(1).(func(context.Context, string, params.UpdateInstanceParams) error); ok { - r1 = rf(ctx, instanceNameOrID, param) + r1 = rf(ctx, instanceName, param) } else { r1 = ret.Error(1) } @@ -4805,36 +1684,6 @@ func (_m *Store) UpdateInstance(ctx context.Context, instanceNameOrID string, pa return r0, r1 } -// Store_UpdateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateInstance' -type Store_UpdateInstance_Call struct { - *mock.Call -} - -// UpdateInstance is a helper method to define mock.On call -// - ctx context.Context -// - instanceNameOrID string -// - param params.UpdateInstanceParams -func (_e *Store_Expecter) UpdateInstance(ctx interface{}, instanceNameOrID interface{}, param interface{}) *Store_UpdateInstance_Call { - return &Store_UpdateInstance_Call{Call: _e.mock.On("UpdateInstance", ctx, instanceNameOrID, param)} -} - -func (_c *Store_UpdateInstance_Call) Run(run func(ctx context.Context, instanceNameOrID string, param params.UpdateInstanceParams)) *Store_UpdateInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateInstanceParams)) - }) - return _c -} - -func (_c *Store_UpdateInstance_Call) Return(_a0 params.Instance, _a1 error) *Store_UpdateInstance_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateInstance_Call) RunAndReturn(run func(context.Context, string, params.UpdateInstanceParams) (params.Instance, error)) *Store_UpdateInstance_Call { - _c.Call.Return(run) - return _c -} - // UpdateOrganization provides a mock function with given fields: ctx, orgID, param func (_m *Store) UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) (params.Organization, error) { ret := _m.Called(ctx, orgID, param) @@ -4863,36 +1712,6 @@ func (_m *Store) UpdateOrganization(ctx context.Context, orgID string, param par return r0, r1 } -// Store_UpdateOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateOrganization' -type Store_UpdateOrganization_Call struct { - *mock.Call -} - -// UpdateOrganization is a helper method to define mock.On call -// - ctx context.Context -// - orgID string -// - param params.UpdateEntityParams -func (_e *Store_Expecter) UpdateOrganization(ctx interface{}, orgID interface{}, 
param interface{}) *Store_UpdateOrganization_Call { - return &Store_UpdateOrganization_Call{Call: _e.mock.On("UpdateOrganization", ctx, orgID, param)} -} - -func (_c *Store_UpdateOrganization_Call) Run(run func(ctx context.Context, orgID string, param params.UpdateEntityParams)) *Store_UpdateOrganization_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) - }) - return _c -} - -func (_c *Store_UpdateOrganization_Call) Return(_a0 params.Organization, _a1 error) *Store_UpdateOrganization_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateOrganization_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Organization, error)) *Store_UpdateOrganization_Call { - _c.Call.Return(run) - return _c -} - // UpdateRepository provides a mock function with given fields: ctx, repoID, param func (_m *Store) UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (params.Repository, error) { ret := _m.Called(ctx, repoID, param) @@ -4921,36 +1740,6 @@ func (_m *Store) UpdateRepository(ctx context.Context, repoID string, param para return r0, r1 } -// Store_UpdateRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateRepository' -type Store_UpdateRepository_Call struct { - *mock.Call -} - -// UpdateRepository is a helper method to define mock.On call -// - ctx context.Context -// - repoID string -// - param params.UpdateEntityParams -func (_e *Store_Expecter) UpdateRepository(ctx interface{}, repoID interface{}, param interface{}) *Store_UpdateRepository_Call { - return &Store_UpdateRepository_Call{Call: _e.mock.On("UpdateRepository", ctx, repoID, param)} -} - -func (_c *Store_UpdateRepository_Call) Run(run func(ctx context.Context, repoID string, param params.UpdateEntityParams)) *Store_UpdateRepository_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) - }) - return _c -} - -func (_c *Store_UpdateRepository_Call) Return(_a0 params.Repository, _a1 error) *Store_UpdateRepository_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateRepository_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Repository, error)) *Store_UpdateRepository_Call { - _c.Call.Return(run) - return _c -} - // UpdateUser provides a mock function with given fields: ctx, user, param func (_m *Store) UpdateUser(ctx context.Context, user string, param params.UpdateUserParams) (params.User, error) { ret := _m.Called(ctx, user, param) @@ -4979,36 +1768,6 @@ func (_m *Store) UpdateUser(ctx context.Context, user string, param params.Updat return r0, r1 } -// Store_UpdateUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateUser' -type Store_UpdateUser_Call struct { - *mock.Call -} - -// UpdateUser is a helper method to define mock.On call -// - ctx context.Context -// - user string -// - param params.UpdateUserParams -func (_e *Store_Expecter) UpdateUser(ctx interface{}, user interface{}, param interface{}) *Store_UpdateUser_Call { - return &Store_UpdateUser_Call{Call: _e.mock.On("UpdateUser", ctx, user, param)} -} - -func (_c *Store_UpdateUser_Call) Run(run func(ctx context.Context, user string, param params.UpdateUserParams)) *Store_UpdateUser_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), 
args[1].(string), args[2].(params.UpdateUserParams)) - }) - return _c -} - -func (_c *Store_UpdateUser_Call) Return(_a0 params.User, _a1 error) *Store_UpdateUser_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Store_UpdateUser_Call) RunAndReturn(run func(context.Context, string, params.UpdateUserParams) (params.User, error)) *Store_UpdateUser_Call { - _c.Call.Return(run) - return _c -} - // NewStore creates a new instance of Store. It also registers a testing interface on the mock and a cleanup function to assert the mock's expectations. // The first argument is typically a *testing.T value. func NewStore(t interface { diff --git a/database/common/store.go b/database/common/store.go index 0cf5d929..4d91e6cd 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -21,45 +21,45 @@ import ( ) type GithubEndpointStore interface { - CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) - GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) - ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) - UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) + CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) + GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) + ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) + UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) DeleteGithubEndpoint(ctx context.Context, name string) error } type GithubCredentialsStore interface { - CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) - GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) - GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) - ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) - UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) + CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) + GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) + GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) + ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) + UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) DeleteGithubCredentials(ctx context.Context, id uint) error } type RepoStore interface { - CreateRepository(ctx context.Context, owner, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) + CreateRepository(ctx context.Context, owner, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) - ListRepositories(ctx context.Context, filter
params.RepositoryFilter) ([]params.Repository, error) + ListRepositories(ctx context.Context) ([]params.Repository, error) DeleteRepository(ctx context.Context, repoID string) error UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (params.Repository, error) } type OrgStore interface { - CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) + CreateOrganization(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) - ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) + ListOrganizations(ctx context.Context) ([]params.Organization, error) DeleteOrganization(ctx context.Context, orgID string) error UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) (params.Organization, error) } type EnterpriseStore interface { - CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) + CreateEnterprise(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) - ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) + ListEnterprises(ctx context.Context) ([]params.Enterprise, error) DeleteEnterprise(ctx context.Context, enterpriseID string) error UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (params.Enterprise, error) } @@ -75,7 +75,8 @@ type PoolStore interface { ListPoolInstances(ctx context.Context, poolID string) ([]params.Instance, error) PoolInstanceCount(ctx context.Context, poolID string) (int64, error) - FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) + GetPoolInstanceByName(ctx context.Context, poolID string, instanceName string) (params.Instance, error) + FindPoolsMatchingAllTags(ctx context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) } type UserStore interface { @@ -90,9 +91,8 @@ type UserStore interface { type InstanceStore interface { CreateInstance(ctx context.Context, poolID string, param params.CreateInstanceParams) (params.Instance, error) - DeleteInstance(ctx context.Context, poolID string, instanceNameOrID string) error - DeleteInstanceByName(ctx context.Context, instanceName string) error - UpdateInstance(ctx context.Context, instanceNameOrID string, param params.UpdateInstanceParams) (params.Instance, error) + DeleteInstance(ctx context.Context, poolID string, instanceName string) error + UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) // Probably a bad idea without some kind of filter or at least pagination // @@ -100,13 +100,13 @@ type InstanceStore interface { // TODO: add filter/pagination
ListAllInstances(ctx context.Context) ([]params.Instance, error) - GetInstance(ctx context.Context, instanceNameOrID string) (params.Instance, error) - AddInstanceEvent(ctx context.Context, instanceNameOrID string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error + GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) + AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error } type JobsStore interface { CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) - ListEntityJobsByStatus(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) + ListEntityJobsByStatus(ctx context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) ListJobsByStatus(ctx context.Context, status params.JobStatus) ([]params.Job, error) ListAllJobs(ctx context.Context) ([]params.Job, error) @@ -120,13 +120,13 @@ type JobsStore interface { } type EntityPoolStore interface { - CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (params.Pool, error) - GetEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) - DeleteEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) error - UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) + CreateEntityPool(ctx context.Context, entity params.GithubEntity, param params.CreatePoolParams) (params.Pool, error) + GetEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) + DeleteEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) error + UpdateEntityPool(ctx context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) - ListEntityPools(ctx context.Context, entity params.ForgeEntity) ([]params.Pool, error) - ListEntityInstances(ctx context.Context, entity params.ForgeEntity) ([]params.Instance, error) + ListEntityPools(ctx context.Context, entity params.GithubEntity) ([]params.Pool, error) + ListEntityInstances(ctx context.Context, entity params.GithubEntity) ([]params.Instance, error) } type ControllerStore interface { @@ -135,40 +135,7 @@ type ControllerStore interface { UpdateController(info params.UpdateControllerParams) (params.ControllerInfo, error) } -type ScaleSetsStore interface { - ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) - CreateEntityScaleSet(_ context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) - ListEntityScaleSets(_ context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) - UpdateEntityScaleSet(_ context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) - GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) - DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) - SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error - SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error -} - -type ScaleSetInstanceStore interface { - ListScaleSetInstances(_ 
context.Context, scalesetID uint) ([]params.Instance, error) - CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) -} - -type GiteaEndpointStore interface { - CreateGiteaEndpoint(_ context.Context, param params.CreateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) - ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) - DeleteGiteaEndpoint(_ context.Context, name string) (err error) - GetGiteaEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) - UpdateGiteaEndpoint(_ context.Context, name string, param params.UpdateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) -} - -type GiteaCredentialsStore interface { - CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) - GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) - GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) - ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) - UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) - DeleteGiteaCredentials(ctx context.Context, id uint) (err error) -} - -//go:generate go run github.com/vektra/mockery/v2@latest +//go:generate mockery --name=Store type Store interface { RepoStore OrgStore @@ -181,13 +148,7 @@ type Store interface { GithubCredentialsStore ControllerStore EntityPoolStore - ScaleSetsStore - ScaleSetInstanceStore - GiteaEndpointStore - GiteaCredentialsStore ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) - GetForgeEntity(_ context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) - AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error } diff --git a/database/common/watcher.go b/database/common/watcher.go index 94152094..d8700189 100644 --- a/database/common/watcher.go +++ b/database/common/watcher.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package common import "context" @@ -32,9 +18,7 @@ const ( JobEntityType DatabaseEntityType = "job" ControllerEntityType DatabaseEntityType = "controller" GithubCredentialsEntityType DatabaseEntityType = "github_credentials" // #nosec G101 - GiteaCredentialsEntityType DatabaseEntityType = "gitea_credentials" // #nosec G101 GithubEndpointEntityType DatabaseEntityType = "github_endpoint" - ScaleSetEntityType DatabaseEntityType = "scaleset" ) const ( diff --git a/database/sql/common_test.go b/database/sql/common_test.go index a3c62e06..af0adcf9 100644 --- a/database/sql/common_test.go +++ b/database/sql/common_test.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package sql const ( diff --git a/database/sql/controller.go b/database/sql/controller.go index 5bf60763..71890c88 100644 --- a/database/sql/controller.go +++ b/database/sql/controller.go @@ -15,11 +15,10 @@ package sql import ( - "errors" - "fmt" "net/url" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -31,7 +30,7 @@ import ( func dbControllerToCommonController(dbInfo ControllerInfo) (params.ControllerInfo, error) { url, err := url.JoinPath(dbInfo.WebhookBaseURL, dbInfo.ControllerID.String()) if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error joining webhook URL: %w", err) + return params.ControllerInfo{}, errors.Wrap(err, "joining webhook URL") } return params.ControllerInfo{ @@ -50,27 +49,30 @@ func (s *sqlDatabase) ControllerInfo() (params.ControllerInfo, error) { q := s.conn.Model(&ControllerInfo{}).First(&info) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", runnerErrors.ErrNotFound) + return params.ControllerInfo{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching controller info") } - return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", q.Error) + return params.ControllerInfo{}, errors.Wrap(q.Error, "fetching controller info") } paramInfo, err := dbControllerToCommonController(info) if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error converting controller info: %w", err) + return params.ControllerInfo{}, errors.Wrap(err, "converting controller info") } return paramInfo, nil } func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if _, err := s.ControllerInfo(); err == nil { return params.ControllerInfo{}, runnerErrors.NewConflictError("controller already initialized") } newID, err := uuid.NewRandom() if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error generating UUID: %w", err) + return params.ControllerInfo{}, errors.Wrap(err, "generating UUID") } newInfo := ControllerInfo{ @@ -80,7 +82,7 @@ func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { q := s.conn.Save(&newInfo) if q.Error != nil { - return 
params.ControllerInfo{}, fmt.Errorf("error saving controller info: %w", q.Error) + return params.ControllerInfo{}, errors.Wrap(q.Error, "saving controller info") } return params.ControllerInfo{ @@ -89,6 +91,9 @@ func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { } func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (paramInfo params.ControllerInfo, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.ControllerEntityType, common.UpdateOperation, paramInfo) @@ -99,13 +104,13 @@ func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (para q := tx.Model(&ControllerInfo{}).First(&dbInfo) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return fmt.Errorf("error fetching controller info: %w", runnerErrors.ErrNotFound) + return errors.Wrap(runnerErrors.ErrNotFound, "fetching controller info") } - return fmt.Errorf("error fetching controller info: %w", q.Error) + return errors.Wrap(q.Error, "fetching controller info") } if err := info.Validate(); err != nil { - return fmt.Errorf("error validating controller info: %w", err) + return errors.Wrap(err, "validating controller info") } if info.MetadataURL != nil { @@ -126,17 +131,17 @@ func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (para q = tx.Save(&dbInfo) if q.Error != nil { - return fmt.Errorf("error saving controller info: %w", q.Error) + return errors.Wrap(q.Error, "saving controller info") } return nil }) if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error updating controller info: %w", err) + return params.ControllerInfo{}, errors.Wrap(err, "updating controller info") } paramInfo, err = dbControllerToCommonController(dbInfo) if err != nil { - return params.ControllerInfo{}, fmt.Errorf("error converting controller info: %w", err) + return params.ControllerInfo{}, errors.Wrap(err, "converting controller info") } return paramInfo, nil } diff --git a/database/sql/controller_test.go b/database/sql/controller_test.go index 949f675f..b4076e92 100644 --- a/database/sql/controller_test.go +++ b/database/sql/controller_test.go @@ -69,5 +69,6 @@ func (s *CtrlTestSuite) TestInitControllerAlreadyInitialized() { } func TestCtrlTestSuite(t *testing.T) { + t.Parallel() suite.Run(t, new(CtrlTestSuite)) } diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index d201cd21..9b927bed 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -16,11 +16,10 @@ package sql import ( "context" - "errors" - "fmt" "log/slog" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -29,17 +28,16 @@ import ( "github.com/cloudbase/garm/params" ) -func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (paramEnt params.Enterprise, err error) { +func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (paramEnt params.Enterprise, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if webhookSecret == "" { return params.Enterprise{}, errors.New("creating enterprise: missing secret") } - if credentials.ForgeType != params.GithubEndpointType { - return params.Enterprise{}, fmt.Errorf("enterprises are not supported on this forge type: %w", runnerErrors.ErrBadRequest) - } - 
secret, err := util.Seal([]byte(webhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return params.Enterprise{}, fmt.Errorf("error encoding secret: %w", err) + return params.Enterprise{}, errors.Wrap(err, "encoding secret") } defer func() { @@ -50,83 +48,78 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credent newEnterprise := Enterprise{ Name: name, WebhookSecret: secret, + CredentialsName: credentialsName, PoolBalancerType: poolBalancerType, } err = s.conn.Transaction(func(tx *gorm.DB) error { - newEnterprise.CredentialsID = &credentials.ID - newEnterprise.EndpointName = &credentials.Endpoint.Name + creds, err := s.getGithubCredentialsByName(ctx, tx, credentialsName, false) + if err != nil { + return errors.Wrap(err, "creating enterprise") + } + if creds.EndpointName == nil { + return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") + } + newEnterprise.CredentialsID = &creds.ID + newEnterprise.CredentialsName = creds.Name + newEnterprise.EndpointName = creds.EndpointName q := tx.Create(&newEnterprise) if q.Error != nil { - return fmt.Errorf("error creating enterprise: %w", q.Error) + return errors.Wrap(q.Error, "creating enterprise") } - newEnterprise, err = s.getEnterpriseByID(ctx, tx, newEnterprise.ID.String(), "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") - if err != nil { - return fmt.Errorf("error creating enterprise: %w", err) - } + newEnterprise.Credentials = creds + newEnterprise.Endpoint = creds.Endpoint + return nil }) if err != nil { - return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } - ret, err := s.GetEnterpriseByID(ctx, newEnterprise.ID.String()) + paramEnt, err = s.sqlToCommonEnterprise(newEnterprise, true) if err != nil { - return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } - return ret, nil + return paramEnt, nil } func (s *sqlDatabase) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) { enterprise, err := s.getEnterprise(ctx, name, endpointName) if err != nil { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } param, err := s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } return param, nil } func (s *sqlDatabase) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) { - preloadList := []string{ - "Pools", - "Credentials", - "Endpoint", - "Credentials.Endpoint", - "Events", - } - enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, preloadList...) 
+ enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Pools", "Credentials", "Endpoint") if err != nil { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } param, err := s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } return param, nil } -func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { +func (s *sqlDatabase) ListEnterprises(_ context.Context) ([]params.Enterprise, error) { var enterprises []Enterprise q := s.conn. Preload("Credentials"). Preload("Credentials.Endpoint"). - Preload("Endpoint") - if filter.Name != "" { - q = q.Where("name = ?", filter.Name) - } - if filter.Endpoint != "" { - q = q.Where("endpoint_name = ?", filter.Endpoint) - } - q = q.Find(&enterprises) + Preload("Endpoint"). + Find(&enterprises) if q.Error != nil { - return []params.Enterprise{}, fmt.Errorf("error fetching enterprises: %w", q.Error) + return []params.Enterprise{}, errors.Wrap(q.Error, "fetching enterprises") } ret := make([]params.Enterprise, len(enterprises)) @@ -134,7 +127,7 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.Enterpris var err error ret[idx], err = s.sqlToCommonEnterprise(val, true) if err != nil { - return nil, fmt.Errorf("error fetching enterprises: %w", err) + return nil, errors.Wrap(err, "fetching enterprises") } } @@ -142,9 +135,12 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.Enterpris } func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return fmt.Errorf("error fetching enterprise: %w", err) + return errors.Wrap(err, "fetching enterprise") } defer func(ent Enterprise) { @@ -160,13 +156,16 @@ func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) q := s.conn.Unscoped().Delete(&enterprise) if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return fmt.Errorf("error deleting enterprise: %w", q.Error) + return errors.Wrap(q.Error, "deleting enterprise") } return nil } func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (newParams params.Enterprise, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.EnterpriseEntityType, common.UpdateOperation, newParams) @@ -178,31 +177,31 @@ func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, var err error enterprise, err = s.getEnterpriseByID(ctx, tx, enterpriseID) if err != nil { - return fmt.Errorf("error fetching enterprise: %w", err) + return errors.Wrap(err, "fetching enterprise") } if enterprise.EndpointName == nil { - return fmt.Errorf("error enterprise has no endpoint: %w", runnerErrors.ErrUnprocessable) + return errors.Wrap(runnerErrors.ErrUnprocessable, "enterprise has no endpoint") } if param.CredentialsName != "" { creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { - return fmt.Errorf("error fetching credentials: %w", err) + return errors.Wrap(err, "fetching credentials") } if 
creds.EndpointName == nil { - return fmt.Errorf("error credentials have no endpoint: %w", runnerErrors.ErrUnprocessable) + return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") } if *creds.EndpointName != *enterprise.EndpointName { - return fmt.Errorf("error endpoint mismatch: %w", runnerErrors.ErrBadRequest) + return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch") } enterprise.CredentialsID = &creds.ID } if param.WebhookSecret != "" { secret, err := util.Seal([]byte(param.WebhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return fmt.Errorf("error encoding secret: %w", err) + return errors.Wrap(err, "encoding secret") } enterprise.WebhookSecret = secret } @@ -213,22 +212,22 @@ func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, q := tx.Save(&enterprise) if q.Error != nil { - return fmt.Errorf("error saving enterprise: %w", q.Error) + return errors.Wrap(q.Error, "saving enterprise") } return nil }) if err != nil { - return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "updating enterprise") } enterprise, err = s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "updating enterprise") } newParams, err = s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "updating enterprise") } return newParams, nil } @@ -245,7 +244,7 @@ func (s *sqlDatabase) getEnterprise(_ context.Context, name, endpointName string if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Enterprise{}, runnerErrors.ErrNotFound } - return Enterprise{}, fmt.Errorf("error fetching enterprise from database: %w", q.Error) + return Enterprise{}, errors.Wrap(q.Error, "fetching enterprise from database") } return enterprise, nil } @@ -253,7 +252,7 @@ func (s *sqlDatabase) getEnterprise(_ context.Context, name, endpointName string func (s *sqlDatabase) getEnterpriseByID(_ context.Context, tx *gorm.DB, id string, preload ...string) (Enterprise, error) { u, err := uuid.Parse(id) if err != nil { - return Enterprise{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) + return Enterprise{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var enterprise Enterprise @@ -269,7 +268,7 @@ func (s *sqlDatabase) getEnterpriseByID(_ context.Context, tx *gorm.DB, id strin if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Enterprise{}, runnerErrors.ErrNotFound } - return Enterprise{}, fmt.Errorf("error fetching enterprise from database: %w", q.Error) + return Enterprise{}, errors.Wrap(q.Error, "fetching enterprise from database") } return enterprise, nil } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 9192a362..5da67b1d 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -53,11 +53,9 @@ type EnterpriseTestSuite struct { adminCtx context.Context adminUserID string - testCreds params.ForgeCredentials - ghesCreds params.ForgeCredentials - secondaryTestCreds params.ForgeCredentials - githubEndpoint params.ForgeEndpoint - ghesEndpoint params.ForgeEndpoint + testCreds params.GithubCredentials + secondaryTestCreds params.GithubCredentials + githubEndpoint params.GithubEndpoint } func (s 
*EnterpriseTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -92,9 +90,7 @@ func (s *EnterpriseTestSuite) SetupTest() { s.Require().NotEmpty(s.adminUserID) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.ghesEndpoint = garmTesting.CreateGHESEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) - s.ghesCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "ghes-creds", db, s.T(), s.ghesEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some enterprise objects in the database, for testing purposes @@ -103,7 +99,7 @@ func (s *EnterpriseTestSuite) SetupTest() { enterprise, err := db.CreateEnterprise( s.adminCtx, fmt.Sprintf("test-enterprise-%d", i), - s.testCreds, + s.testCreds.Name, fmt.Sprintf("test-webhook-secret-%d", i), params.PoolBalancerTypeRoundRobin, ) @@ -182,7 +178,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprise() { enterprise, err := s.Store.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - s.testCreds, + s.Fixtures.CreateEnterpriseParams.CredentialsName, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -213,16 +209,24 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseInvalidDBPassphrase() { _, err = sqlDB.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - s.testCreds, + s.Fixtures.CreateEnterpriseParams.CredentialsName, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { s.Fixtures.SQLMock.ExpectBegin() + s.Fixtures.SQLMock. + ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")). + WithArgs(s.adminUserID, s.Fixtures.Enterprises[0].CredentialsName, 1). + WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}).AddRow(s.testCreds.ID, s.testCreds.Endpoint.Name)) + s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")). + WithArgs(s.testCreds.Endpoint.Name). + WillReturnRows(sqlmock.NewRows([]string{"name"}). + AddRow(s.testCreds.Endpoint.Name)) s.Fixtures.SQLMock. ExpectExec(regexp.QuoteMeta("INSERT INTO `enterprises`")). 
WillReturnError(fmt.Errorf("creating enterprise mock error")) @@ -231,12 +235,12 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { _, err := s.StoreSQLMocked.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - s.testCreds, + s.Fixtures.CreateEnterpriseParams.CredentialsName, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("error creating enterprise: error creating enterprise: creating enterprise mock error", err.Error()) + s.Require().Equal("creating enterprise: creating enterprise: creating enterprise mock error", err.Error()) s.assertSQLMockExpectations() } @@ -259,7 +263,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseNotFound() { _, err := s.Store.GetEnterprise(s.adminCtx, "dummy-name", "github.com") s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: not found", err.Error()) + s.Require().Equal("fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterpriseDBDecryptingErr() { @@ -271,77 +275,27 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseDBDecryptingErr() { _, err := s.StoreSQLMocked.GetEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].Name, s.Fixtures.Enterprises[0].Endpoint.Name) s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: missing secret", err.Error()) + s.Require().Equal("fetching enterprise: missing secret", err.Error()) s.assertSQLMockExpectations() } func (s *EnterpriseTestSuite) TestListEnterprises() { - enterprises, err := s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{}) + enterprises, err := s.Store.ListEnterprises(s.adminCtx) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), s.Fixtures.Enterprises, enterprises) } -func (s *EnterpriseTestSuite) TestListEnterprisesWithFilter() { - enterprise, err := s.Store.CreateEnterprise( - s.adminCtx, - "test-enterprise", - s.ghesCreds, - "test-secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - - enterprise2, err := s.Store.CreateEnterprise( - s.adminCtx, - "test-enterprise", - s.testCreds, - "test-secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - - enterprise3, err := s.Store.CreateEnterprise( - s.adminCtx, - "test-enterprise2", - s.testCreds, - "test-secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - enterprises, err := s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ - Name: "test-enterprise", - }) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise, enterprise2}, enterprises) - - enterprises, err = s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ - Name: "test-enterprise", - Endpoint: s.ghesEndpoint.Name, - }) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise}, enterprises) - - enterprises, err = s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ - Name: "test-enterprise2", - }) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise3}, enterprises) -} - func (s *EnterpriseTestSuite) TestListEnterprisesDBFetchErr() { s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprises` WHERE `enterprises`.`deleted_at` IS NULL")). 
WillReturnError(fmt.Errorf("fetching user from database mock error")) - _, err := s.StoreSQLMocked.ListEnterprises(s.adminCtx, params.EnterpriseFilter{}) + _, err := s.StoreSQLMocked.ListEnterprises(s.adminCtx) s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error fetching enterprises: fetching user from database mock error", err.Error()) + s.Require().Equal("fetching enterprises: fetching user from database mock error", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprise() { @@ -350,14 +304,14 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprise() { s.Require().Nil(err) _, err = s.Store.GetEnterpriseByID(s.adminCtx, s.Fixtures.Enterprises[0].ID) s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: not found", err.Error()) + s.Require().Equal("fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseInvalidEnterpriseID() { err := s.Store.DeleteEnterprise(s.adminCtx, "dummy-enterprise-id") s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching enterprise: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseDBDeleteErr() { @@ -375,7 +329,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterpriseDBDeleteErr() { err := s.StoreSQLMocked.DeleteEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID) s.Require().NotNil(err) - s.Require().Equal("error deleting enterprise: mocked delete enterprise error", err.Error()) + s.Require().Equal("deleting enterprise: mocked delete enterprise error", err.Error()) s.assertSQLMockExpectations() } @@ -391,7 +345,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseInvalidEnterpriseID() { _, err := s.Store.UpdateEnterprise(s.adminCtx, "dummy-enterprise-id", s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("error updating enterprise: error fetching enterprise: error parsing id: invalid request", err.Error()) + s.Require().Equal("updating enterprise: fetching enterprise: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBEncryptErr() { @@ -416,7 +370,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBEncryptErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("error updating enterprise: error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("updating enterprise: encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -444,7 +398,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBSaveErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("error updating enterprise: error saving enterprise: saving enterprise mock error", err.Error()) + s.Require().Equal("updating enterprise: saving enterprise: saving enterprise mock error", err.Error()) s.assertSQLMockExpectations() } @@ -472,7 +426,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBDecryptingErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("error updating enterprise: error encoding secret: invalid passphrase length (expected length 
32 characters)", err.Error()) + s.Require().Equal("updating enterprise: encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -487,7 +441,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDInvalidEnterpriseID() { _, err := s.Store.GetEnterpriseByID(s.adminCtx, "dummy-enterprise-id") s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching enterprise: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { @@ -495,10 +449,6 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprises` WHERE id = ? AND `enterprises`.`deleted_at` IS NULL ORDER BY `enterprises`.`id` LIMIT ?")). WithArgs(s.Fixtures.Enterprises[0].ID, 1). WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Enterprises[0].ID)) - s.Fixtures.SQLMock. - ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprise_events` WHERE `enterprise_events`.`enterprise_id` = ? AND `enterprise_events`.`deleted_at` IS NULL")). - WithArgs(s.Fixtures.Enterprises[0].ID). - WillReturnRows(sqlmock.NewRows([]string{"enterprise_id"}).AddRow(s.Fixtures.Enterprises[0].ID)) s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`enterprise_id` = ? AND `pools`.`deleted_at` IS NULL")). WithArgs(s.Fixtures.Enterprises[0].ID). @@ -508,7 +458,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error fetching enterprise: missing secret", err.Error()) + s.Require().Equal("fetching enterprise: missing secret", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterprisePool() { @@ -540,14 +490,14 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolMissingTags() { } func (s *EnterpriseTestSuite) TestCreateEnterprisePoolInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error parsing id: invalid request", err.Error()) + s.Require().Equal("parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchTagErr() { @@ -565,7 +515,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error()) + s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error()) s.assertSQLMockExpectations() } @@ -592,7 +542,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBAddingPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error creating pool: mocked adding pool error", err.Error()) + s.Require().Equal("creating pool: mocked adding pool error", err.Error()) s.assertSQLMockExpectations() } @@ -623,7 +573,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBSaveTagErr() { _, err = 
s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error associating tags: mocked saving tag error", err.Error()) + s.Require().Equal("associating tags: mocked saving tag error", err.Error()) s.assertSQLMockExpectations() } @@ -663,7 +613,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error fetching pool by ID: not found", err.Error()) + s.Require().Equal("fetching pool: not found", err.Error()) s.assertSQLMockExpectations() } @@ -687,14 +637,14 @@ func (s *EnterpriseTestSuite) TestListEnterprisePools() { } func (s *EnterpriseTestSuite) TestListEnterprisePoolsInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } _, err := s.Store.ListEntityPools(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("error fetching pools: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pools: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterprisePool() { @@ -712,14 +662,14 @@ func (s *EnterpriseTestSuite) TestGetEnterprisePool() { } func (s *EnterpriseTestSuite) TestGetEnterprisePoolInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { @@ -734,18 +684,18 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { s.Require().Nil(err) _, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID) - s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: finding pool: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("error parsing id: invalid request", err.Error()) + s.Require().Equal("parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDBDeleteErr() { @@ -765,7 +715,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDBDeleteErr() { err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID) s.Require().NotNil(err) - s.Require().Equal("error removing pool: mocked deleting pool error", err.Error()) + s.Require().Equal("removing pool: mocked deleting pool error", err.Error()) s.assertSQLMockExpectations() } @@ -793,14 +743,14 @@ func (s *EnterpriseTestSuite) TestListEnterpriseInstances() { } func (s *EnterpriseTestSuite) TestListEnterpriseInstancesInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: 
params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } _, err := s.Store.ListEntityInstances(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching entity: parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { @@ -821,38 +771,17 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolInvalidEnterpriseID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: "dummy-enterprise-id", - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) -} - -func (s *EnterpriseTestSuite) TestAddRepoEntityEvent() { - enterprise, err := s.Store.CreateEnterprise( - s.adminCtx, - s.Fixtures.CreateEnterpriseParams.Name, - s.testCreds, - s.Fixtures.CreateEnterpriseParams.WebhookSecret, - params.PoolBalancerTypeRoundRobin) - - s.Require().Nil(err) - entity, err := enterprise.GetEntity() - s.Require().Nil(err) - err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20) - s.Require().Nil(err) - - enterprise, err = s.Store.GetEnterpriseByID(s.adminCtx, enterprise.ID) - s.Require().Nil(err) - s.Require().Equal(1, len(enterprise.Events)) - s.Require().Equal(params.StatusEvent, enterprise.Events[0].EventType) - s.Require().Equal(params.EventInfo, enterprise.Events[0].EventLevel) - s.Require().Equal("this is a test", enterprise.Events[0].Message) + s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } func TestEnterpriseTestSuite(t *testing.T) { + t.Parallel() suite.Run(t, new(EnterpriseTestSuite)) } diff --git a/database/sql/gitea.go b/database/sql/gitea.go deleted file mode 100644 index a9edde09..00000000 --- a/database/sql/gitea.go +++ /dev/null @@ -1,486 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
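A note on the error-wrapping change that runs through this whole patch: the enterprise store replaces every fmt.Errorf("error X: %w", err) with errors.Wrap(err, "X"), dropping the redundant "error " prefix that the enterprise_test.go assertions above shed, while the deleted Gitea store below still carries the older style. A minimal sketch of the difference, assuming the errors package imported by enterprise.go is github.com/pkg/errors (the import block sits outside the hunks shown):

package main

import (
	"errors"
	"fmt"

	pkgerrors "github.com/pkg/errors"
)

var errNotFound = errors.New("not found")

func main() {
	// Older style, still visible in the deleted file below: stdlib
	// wrapping with an extra "error " prefix baked into the message.
	oldStyle := fmt.Errorf("error fetching enterprise: %w", errNotFound)

	// Newer style used by enterprise.go above: same cause chain,
	// shorter "<context>: <cause>" message.
	newStyle := pkgerrors.Wrap(errNotFound, "fetching enterprise")

	fmt.Println(oldStyle) // error fetching enterprise: not found
	fmt.Println(newStyle) // fetching enterprise: not found

	// Both forms keep the sentinel reachable through the wrap chain.
	fmt.Println(errors.Is(oldStyle, errNotFound), errors.Is(newStyle, errNotFound)) // true true
}

Because both styles satisfy Unwrap, sentinel checks such as errors.Is(q.Error, gorm.ErrRecordNotFound) elsewhere in this patch keep working unchanged.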
- -package sql - -import ( - "context" - "errors" - "fmt" - "log/slog" - - "gorm.io/gorm" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" -) - -func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.CreateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { - defer func() { - if err == nil { - s.sendNotify(common.GithubEndpointEntityType, common.CreateOperation, ghEndpoint) - } - }() - var endpoint GithubEndpoint - err = s.conn.Transaction(func(tx *gorm.DB) error { - if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil { - return fmt.Errorf("gitea endpoint already exists: %w", runnerErrors.ErrDuplicateEntity) - } - endpoint = GithubEndpoint{ - Name: param.Name, - Description: param.Description, - APIBaseURL: param.APIBaseURL, - BaseURL: param.BaseURL, - CACertBundle: param.CACertBundle, - EndpointType: params.GiteaEndpointType, - } - - if err := tx.Create(&endpoint).Error; err != nil { - return fmt.Errorf("error creating gitea endpoint: %w", err) - } - return nil - }) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("error creating gitea endpoint: %w", err) - } - ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("error converting gitea endpoint: %w", err) - } - return ghEndpoint, nil -} - -func (s *sqlDatabase) ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) { - var endpoints []GithubEndpoint - err := s.conn.Where("endpoint_type = ?", params.GiteaEndpointType).Find(&endpoints).Error - if err != nil { - return nil, fmt.Errorf("error fetching gitea endpoints: %w", err) - } - - var ret []params.ForgeEndpoint - for _, ep := range endpoints { - commonEp, err := s.sqlToCommonGithubEndpoint(ep) - if err != nil { - return nil, fmt.Errorf("error converting gitea endpoint: %w", err) - } - ret = append(ret, commonEp) - } - return ret, nil -} - -func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param params.UpdateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { - defer func() { - if err == nil { - s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) - } - }() - var endpoint GithubEndpoint - err = s.conn.Transaction(func(tx *gorm.DB) error { - if err := tx.Where("name = ? 
and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error; err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return runnerErrors.NewNotFoundError("gitea endpoint %q not found", name) - } - return fmt.Errorf("error fetching gitea endpoint: %w", err) - } - - var credsCount int64 - if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error fetching gitea credentials: %w", err) - } - } - if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil) { - return runnerErrors.NewBadRequestError("cannot update endpoint URLs with existing credentials") - } - - if param.APIBaseURL != nil { - endpoint.APIBaseURL = *param.APIBaseURL - } - - if param.BaseURL != nil { - endpoint.BaseURL = *param.BaseURL - } - - if param.CACertBundle != nil { - endpoint.CACertBundle = param.CACertBundle - } - - if param.Description != nil { - endpoint.Description = *param.Description - } - - if err := tx.Save(&endpoint).Error; err != nil { - return fmt.Errorf("error updating gitea endpoint: %w", err) - } - - return nil - }) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("error updating gitea endpoint: %w", err) - } - ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("error converting gitea endpoint: %w", err) - } - return ghEndpoint, nil -} - -func (s *sqlDatabase) GetGiteaEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) { - var endpoint GithubEndpoint - err := s.conn.Where("name = ? and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeEndpoint{}, runnerErrors.NewNotFoundError("gitea endpoint %q not found", name) - } - return params.ForgeEndpoint{}, fmt.Errorf("error fetching gitea endpoint: %w", err) - } - - return s.sqlToCommonGithubEndpoint(endpoint) -} - -func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err error) { - defer func() { - if err == nil { - s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name}) - } - }() - err = s.conn.Transaction(func(tx *gorm.DB) error { - var endpoint GithubEndpoint - if err := tx.Where("name = ? 
and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error; err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return nil - } - return fmt.Errorf("error fetching gitea endpoint: %w", err) - } - - var credsCount int64 - if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error fetching gitea credentials: %w", err) - } - } - - var repoCnt int64 - if err := tx.Model(&Repository{}).Where("endpoint_name = ?", endpoint.Name).Count(&repoCnt).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error fetching gitea repositories: %w", err) - } - } - - var orgCnt int64 - if err := tx.Model(&Organization{}).Where("endpoint_name = ?", endpoint.Name).Count(&orgCnt).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error fetching gitea organizations: %w", err) - } - } - - if credsCount > 0 || repoCnt > 0 || orgCnt > 0 { - return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") - } - - if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { - return fmt.Errorf("error deleting gitea endpoint: %w", err) - } - return nil - }) - if err != nil { - return fmt.Errorf("error deleting gitea endpoint: %w", err) - } - return nil -} - -func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { - userID, err := getUIDFromContext(ctx) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error creating gitea credentials: %w", err) - } - if param.Endpoint == "" { - return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("endpoint name is required") - } - - defer func() { - if err == nil { - s.sendNotify(common.GiteaCredentialsEntityType, common.CreateOperation, gtCreds) - } - }() - var creds GiteaCredentials - err = s.conn.Transaction(func(tx *gorm.DB) error { - var endpoint GithubEndpoint - if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GiteaEndpointType).First(&endpoint).Error; err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return runnerErrors.NewNotFoundError("gitea endpoint %q not found", param.Endpoint) - } - return fmt.Errorf("error fetching gitea endpoint: %w", err) - } - - if err := tx.Where("name = ? and user_id = ?", param.Name, userID).First(&creds).Error; err == nil { - return fmt.Errorf("gitea credentials already exists: %w", runnerErrors.ErrDuplicateEntity) - } - - var data []byte - var err error - switch param.AuthType { - case params.ForgeAuthTypePAT: - data, err = s.marshalAndSeal(param.PAT) - default: - return runnerErrors.NewBadRequestError("invalid auth type %q", param.AuthType) - } - if err != nil { - return fmt.Errorf("error marshaling and sealing credentials: %w", err) - } - - creds = GiteaCredentials{ - Name: param.Name, - Description: param.Description, - EndpointName: &endpoint.Name, - AuthType: param.AuthType, - Payload: data, - UserID: &userID, - } - - if err := tx.Create(&creds).Error; err != nil { - return fmt.Errorf("error creating gitea credentials: %w", err) - } - // Skip making an extra query. 
- creds.Endpoint = endpoint - - return nil - }) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error creating gitea credentials: %w", err) - } - gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting gitea credentials: %w", err) - } - return gtCreds, nil -} - -func (s *sqlDatabase) getGiteaCredentialsByName(ctx context.Context, tx *gorm.DB, name string, detailed bool) (GiteaCredentials, error) { - var creds GiteaCredentials - q := tx.Preload("Endpoint") - - if detailed { - q = q. - Preload("Repositories"). - Preload("Organizations"). - Preload("Repositories.GiteaCredentials"). - Preload("Organizations.GiteaCredentials"). - Preload("Repositories.Credentials"). - Preload("Organizations.Credentials") - } - - userID, err := getUIDFromContext(ctx) - if err != nil { - return GiteaCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - q = q.Where("user_id = ?", userID) - - err = q.Where("name = ?", name).First(&creds).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return GiteaCredentials{}, runnerErrors.NewNotFoundError("gitea credentials %q not found", name) - } - return GiteaCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - - return creds, nil -} - -func (s *sqlDatabase) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { - creds, err := s.getGiteaCredentialsByName(ctx, s.conn, name, detailed) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - - return s.sqlGiteaToCommonForgeCredentials(creds) -} - -func (s *sqlDatabase) GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { - var creds GiteaCredentials - q := s.conn.Preload("Endpoint") - - if detailed { - q = q. - Preload("Repositories"). - Preload("Organizations"). - Preload("Repositories.GiteaCredentials"). - Preload("Organizations.GiteaCredentials"). - Preload("Repositories.Credentials"). 
- Preload("Organizations.Credentials") - } - - if !auth.IsAdmin(ctx) { - userID, err := getUIDFromContext(ctx) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - q = q.Where("user_id = ?", userID) - } - - err := q.Where("id = ?", id).First(&creds).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeCredentials{}, runnerErrors.NewNotFoundError("gitea credentials not found") - } - return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - - return s.sqlGiteaToCommonForgeCredentials(creds) -} - -func (s *sqlDatabase) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { - q := s.conn.Preload("Endpoint") - if !auth.IsAdmin(ctx) { - userID, err := getUIDFromContext(ctx) - if err != nil { - return nil, fmt.Errorf("error fetching gitea credentials: %w", err) - } - q = q.Where("user_id = ?", userID) - } - - var creds []GiteaCredentials - err := q.Preload("Endpoint").Find(&creds).Error - if err != nil { - return nil, fmt.Errorf("error fetching gitea credentials: %w", err) - } - - var ret []params.ForgeCredentials - for _, c := range creds { - commonCreds, err := s.sqlGiteaToCommonForgeCredentials(c) - if err != nil { - return nil, fmt.Errorf("error converting gitea credentials: %w", err) - } - ret = append(ret, commonCreds) - } - return ret, nil -} - -func (s *sqlDatabase) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { - defer func() { - if err == nil { - s.sendNotify(common.GiteaCredentialsEntityType, common.UpdateOperation, gtCreds) - } - }() - var creds GiteaCredentials - err = s.conn.Transaction(func(tx *gorm.DB) error { - q := tx.Preload("Endpoint") - if !auth.IsAdmin(ctx) { - userID, err := getUIDFromContext(ctx) - if err != nil { - return fmt.Errorf("error updating gitea credentials: %w", err) - } - q = q.Where("user_id = ?", userID) - } - - if err := q.Where("id = ?", id).First(&creds).Error; err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return runnerErrors.NewNotFoundError("gitea credentials not found") - } - return fmt.Errorf("error fetching gitea credentials: %w", err) - } - - if param.Name != nil { - creds.Name = *param.Name - } - if param.Description != nil { - creds.Description = *param.Description - } - - var data []byte - var err error - switch creds.AuthType { - case params.ForgeAuthTypePAT: - if param.PAT != nil { - data, err = s.marshalAndSeal(param.PAT) - } - default: - return runnerErrors.NewBadRequestError("invalid auth type %q", creds.AuthType) - } - - if err != nil { - return fmt.Errorf("error marshaling and sealing credentials: %w", err) - } - if len(data) > 0 { - creds.Payload = data - } - - if err := tx.Save(&creds).Error; err != nil { - return fmt.Errorf("error updating gitea credentials: %w", err) - } - return nil - }) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error updating gitea credentials: %w", err) - } - - gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting gitea credentials: %w", err) - } - return gtCreds, nil -} - -func (s *sqlDatabase) DeleteGiteaCredentials(ctx context.Context, id uint) (err error) { - var creds GiteaCredentials - defer func() { - if err == nil { - forgeCreds, innerErr := s.sqlGiteaToCommonForgeCredentials(creds) - if innerErr != nil { - slog.ErrorContext(ctx, 
"converting gitea credentials", "error", innerErr) - } - if creds.ID == 0 || creds.Name == "" { - return - } - s.sendNotify(common.GiteaCredentialsEntityType, common.DeleteOperation, forgeCreds) - } - }() - err = s.conn.Transaction(func(tx *gorm.DB) error { - q := tx.Where("id = ?", id). - Preload("Repositories"). - Preload("Organizations") - if !auth.IsAdmin(ctx) { - userID, err := getUIDFromContext(ctx) - if err != nil { - return fmt.Errorf("error deleting gitea credentials: %w", err) - } - q = q.Where("user_id = ?", userID) - } - - err := q.First(&creds).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return nil - } - return fmt.Errorf("error fetching gitea credentials: %w", err) - } - - if len(creds.Repositories) > 0 { - return runnerErrors.NewBadRequestError("cannot delete credentials with repositories") - } - if len(creds.Organizations) > 0 { - return runnerErrors.NewBadRequestError("cannot delete credentials with organizations") - } - if err := tx.Unscoped().Delete(&creds).Error; err != nil { - return fmt.Errorf("error deleting gitea credentials: %w", err) - } - return nil - }) - if err != nil { - return fmt.Errorf("error deleting gitea credentials: %w", err) - } - return nil -} diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go deleted file mode 100644 index dff5c471..00000000 --- a/database/sql/gitea_test.go +++ /dev/null @@ -1,848 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package sql - -import ( - "context" - "fmt" - "os" - "testing" - - "github.com/stretchr/testify/suite" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/database/common" - garmTesting "github.com/cloudbase/garm/internal/testing" - "github.com/cloudbase/garm/params" -) - -type GiteaTestSuite struct { - suite.Suite - - giteaEndpoint params.ForgeEndpoint - db common.Store -} - -func (s *GiteaTestSuite) SetupTest() { - db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) - if err != nil { - s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) - } - - s.db = db - - createEpParams := params.CreateGiteaEndpointParams{ - Name: testEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - endpoint, err := s.db.CreateGiteaEndpoint(context.Background(), createEpParams) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - s.Require().Equal(testEndpointName, endpoint.Name) - s.giteaEndpoint = endpoint -} - -func (s *GiteaTestSuite) TestCreatingEndpoint() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: alternetTestEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - s.Require().Equal(alternetTestEndpointName, endpoint.Name) -} - -func (s *GiteaTestSuite) TestCreatingDuplicateEndpointFails() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: alternetTestEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - _, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - - _, err = s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrDuplicateEntity) -} - -func (s *GiteaTestSuite) TestGetEndpoint() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: alternetTestEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - newEndpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - - endpoint, err := s.db.GetGiteaEndpoint(ctx, createEpParams.Name) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - s.Require().Equal(newEndpoint.Name, endpoint.Name) -} - -func (s *GiteaTestSuite) TestGetNonExistingEndpointFailsWithNotFoundError() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - _, err := s.db.GetGiteaEndpoint(ctx, "non-existing") - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestDeletingNonExistingEndpointIsANoop() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - err := s.db.DeleteGiteaEndpoint(ctx, "non-existing") - s.Require().NoError(err) -} - -func (s *GiteaTestSuite) TestDeletingEndpoint() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: alternetTestEndpointName, - Description: 
testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - - err = s.db.DeleteGiteaEndpoint(ctx, alternetTestEndpointName) - s.Require().NoError(err) - - _, err = s.db.GetGiteaEndpoint(ctx, alternetTestEndpointName) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestUpdateEndpoint() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: "deleteme", - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - - newDescription := "another description" - newAPIBaseURL := "https://updated.example.com" - newBaseURL := "https://updated.example.com" - caCertBundle, err := os.ReadFile("../../testdata/certs/srv-pub.pem") - s.Require().NoError(err) - updateEpParams := params.UpdateGiteaEndpointParams{ - Description: &newDescription, - APIBaseURL: &newAPIBaseURL, - BaseURL: &newBaseURL, - CACertBundle: caCertBundle, - } - - updatedEndpoint, err := s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().NoError(err) - s.Require().NotNil(updatedEndpoint) - s.Require().Equal(newDescription, updatedEndpoint.Description) - s.Require().Equal(newAPIBaseURL, updatedEndpoint.APIBaseURL) - s.Require().Equal(newBaseURL, updatedEndpoint.BaseURL) - s.Require().Equal(caCertBundle, updatedEndpoint.CACertBundle) -} - -func (s *GiteaTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - newDescription := "test desc" - updateEpParams := params.UpdateGiteaEndpointParams{ - Description: &newDescription, - } - - _, err := s.db.UpdateGiteaEndpoint(ctx, "non-existing", updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestListEndpoints() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGiteaEndpointParams{ - Name: alternetTestEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - _, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) - s.Require().NoError(err) - - endpoints, err := s.db.ListGiteaEndpoints(ctx) - s.Require().NoError(err) - s.Require().Len(endpoints, 2) -} - -func (s *GiteaTestSuite) TestCreateCredentialsFailsWithUnauthorizedForAnonUser() { - ctx := context.Background() - - _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{}) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrUnauthorized) -} - -func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenEndpointNameIsEmpty() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{}) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().Regexp("endpoint name is required", err.Error()) -} - -func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenEndpointDoesNotExist() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - _, err := s.db.CreateGiteaCredentials(ctx, 
params.CreateGiteaCredentialsParams{Endpoint: "non-existing"}) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) - s.Require().Regexp("error creating gitea credentials: gitea endpoint \"non-existing\" not found", err.Error()) -} - -func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenAuthTypeIsInvalid() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{Endpoint: s.giteaEndpoint.Name, AuthType: "invalid"}) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().Regexp("invalid auth type", err.Error()) -} - -func (s *GiteaTestSuite) TestCreateCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - s.Require().Equal(credParams.Name, creds.Name) - s.Require().Equal(credParams.Description, creds.Description) - s.Require().Equal(credParams.Endpoint, creds.Endpoint.Name) - s.Require().Equal(credParams.AuthType, creds.AuthType) -} - -func (s *GiteaTestSuite) TestCreateCredentialsFailsOnDuplicateCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - testUser := garmTesting.CreateGARMTestUser(ctx, "testuser", s.db, s.T()) - testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - _, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - - // Creating creds with the same parameters should fail for the same user. - _, err = s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrDuplicateEntity) - - // Creating creds with the same parameters should work for different users. 
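That per-user behaviour comes from getUIDFromContext in the deleted store code above: outside admin contexts, every credentials query is additionally scoped with user_id = ?, so two users can hold identically named credentials without colliding. A minimal sketch of the context plumbing, assuming a bare string user ID (GARM's auth.PopulateContext carries a full user object and claims):

package main

import (
	"context"
	"errors"
	"fmt"
)

// ctxKey is unexported so other packages cannot forge the value.
type ctxKey struct{}

// withUserID plays the role of auth.PopulateContext in the tests below,
// reduced to a plain string ID.
func withUserID(ctx context.Context, id string) context.Context {
	return context.WithValue(ctx, ctxKey{}, id)
}

// getUIDFromContext mirrors the helper the deleted store uses; the caller
// then appends `user_id = ?` so users only ever see their own rows.
func getUIDFromContext(ctx context.Context) (string, error) {
	id, ok := ctx.Value(ctxKey{}).(string)
	if !ok || id == "" {
		return "", errors.New("unauthorized")
	}
	return id, nil
}

func main() {
	userCtx := withUserID(context.Background(), "user-1")
	fmt.Println(getUIDFromContext(userCtx))               // user-1 <nil>
	fmt.Println(getUIDFromContext(context.Background())) // "" unauthorized
}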
- _, err = s.db.CreateGiteaCredentials(testUserCtx, credParams) - s.Require().NoError(err) -} - -func (s *GiteaTestSuite) TestNormalUsersCanOnlySeeTheirOwnCredentialsAdminCanSeeAll() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - testUser := garmTesting.CreateGARMTestUser(ctx, "testuser1", s.db, s.T()) - testUser2 := garmTesting.CreateGARMTestUser(ctx, "testuser2", s.db, s.T()) - testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) - testUser2Ctx := auth.PopulateContext(context.Background(), testUser2, nil) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - credParams.Name = "test-creds2" - creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds2) - - credParams.Name = "test-creds3" - creds3, err := s.db.CreateGiteaCredentials(testUser2Ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds3) - - credsList, err := s.db.ListGiteaCredentials(ctx) - s.Require().NoError(err) - s.Require().Len(credsList, 3) - - credsList, err = s.db.ListGiteaCredentials(testUserCtx) - s.Require().NoError(err) - s.Require().Len(credsList, 1) - s.Require().Equal("test-creds2", credsList[0].Name) - - credsList, err = s.db.ListGiteaCredentials(testUser2Ctx) - s.Require().NoError(err) - s.Require().Len(credsList, 1) - s.Require().Equal("test-creds3", credsList[0].Name) -} - -func (s *GiteaTestSuite) TestGetGiteaCredentialsFailsWhenCredentialsDontExist() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - _, err := s.db.GetGiteaCredentials(ctx, 1, true) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) - - _, err = s.db.GetGiteaCredentialsByName(ctx, "non-existing", true) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestGetGithubCredentialsByNameReturnsOnlyCurrentUserCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - testUser := garmTesting.CreateGARMTestUser(ctx, "test-user1", s.db, s.T()) - testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds2) - - creds2Get, err := s.db.GetGiteaCredentialsByName(testUserCtx, testCredsName, true) - s.Require().NoError(err) - s.Require().NotNil(creds2) - s.Require().Equal(testCredsName, creds2Get.Name) - s.Require().Equal(creds2.ID, creds2Get.ID) - - credsGet, err := s.db.GetGiteaCredentialsByName(ctx, testCredsName, true) - s.Require().NoError(err) - s.Require().NotNil(creds) - s.Require().Equal(testCredsName, credsGet.Name) - s.Require().Equal(creds.ID, credsGet.ID) - - // Admin can get any creds by ID - credsGet, err = s.db.GetGiteaCredentials(ctx, creds2.ID, true) - 
s.Require().NoError(err) - s.Require().NotNil(creds2) - s.Require().Equal(creds2.ID, credsGet.ID) - - // Normal user cannot get other user creds by ID - _, err = s.db.GetGiteaCredentials(testUserCtx, creds.ID, true) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestGetGithubCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - creds2, err := s.db.GetGiteaCredentialsByName(ctx, testCredsName, true) - s.Require().NoError(err) - s.Require().NotNil(creds2) - s.Require().Equal(creds.Name, creds2.Name) - s.Require().Equal(creds.ID, creds2.ID) - - creds2, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) - s.Require().NoError(err) - s.Require().NotNil(creds2) - s.Require().Equal(creds.Name, creds2.Name) - s.Require().Equal(creds.ID, creds2.ID) -} - -func (s *GiteaTestSuite) TestDeleteGiteaCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - err = s.db.DeleteGiteaCredentials(ctx, creds.ID) - s.Require().NoError(err) - - _, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - -func (s *GiteaTestSuite) TestDeleteGiteaCredentialsByNonAdminUser() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - testUser := garmTesting.CreateGARMTestUser(ctx, "test-user4", s.db, s.T()) - testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) - - credParams := params.CreateGiteaCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: s.giteaEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test-creds4", - }, - } - - // Create creds as admin - creds, err := s.db.CreateGiteaCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - // Deleting non existent creds will return a nil error. For the test user - // the creds created by the admin should not be visible, which leads to not found - // which in turn returns no error. - err = s.db.DeleteGiteaCredentials(testUserCtx, creds.ID) - s.Require().NoError(err) - - // Check that the creds created by the admin are still there. - credsGet, err := s.db.GetGiteaCredentials(ctx, creds.ID, true) - s.Require().NoError(err) - s.Require().NotNil(credsGet) - s.Require().Equal(creds.ID, credsGet.ID) - - // Create the same creds with the test user. - creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds2) - - // Remove creds created by test user. - err = s.db.DeleteGiteaCredentials(testUserCtx, creds2.ID) - s.Require().NoError(err) - - // The creds created by the test user should be gone. 
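The next test, TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt, pins down the dependent-entity guard seen in DeleteGiteaCredentials and DeleteGiteaEndpoint above: dependents are loaded or counted inside the same transaction as the delete, and the hard (Unscoped) delete is refused while any remain. A compile-ready sketch of that guard against deliberately simplified gorm models (the schema here is an assumption for illustration, not GARM's):

package store

import (
	"errors"
	"fmt"

	"gorm.io/gorm"
)

// Toy models; GARM's real schema has more fields and associations.
type Credentials struct {
	gorm.Model
	Name string
}

type Repository struct {
	gorm.Model
	CredentialsID uint
}

// deleteCredentials sketches the guard pattern: count dependents inside
// the transaction and refuse the hard delete while any remain, so the
// check and the delete share one snapshot.
func deleteCredentials(db *gorm.DB, id uint) error {
	return db.Transaction(func(tx *gorm.DB) error {
		var creds Credentials
		if err := tx.First(&creds, id).Error; err != nil {
			if errors.Is(err, gorm.ErrRecordNotFound) {
				return nil // deleting a missing row is a no-op, as in the tests here
			}
			return fmt.Errorf("fetching credentials: %w", err)
		}
		var repoCount int64
		if err := tx.Model(&Repository{}).Where("credentials_id = ?", id).Count(&repoCount).Error; err != nil {
			return fmt.Errorf("counting repositories: %w", err)
		}
		if repoCount > 0 {
			return errors.New("cannot delete credentials with repositories")
		}
		// Unscoped skips gorm's soft delete and removes the row for real.
		return tx.Unscoped().Delete(&creds).Error
	})
}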
-	_, err = s.db.GetGiteaCredentials(testUserCtx, creds2.ID, true)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    s.giteaEndpoint.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(repo)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-
-	err = s.db.DeleteRepository(ctx, repo.ID)
-	s.Require().NoError(err)
-
-	org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(org)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-
-	err = s.db.DeleteOrganization(ctx, org.ID)
-	s.Require().NoError(err)
-
-	enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-	s.Require().Equal(params.Enterprise{}, enterprise)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().NoError(err)
-
-	_, err = s.db.GetGiteaCredentials(ctx, creds.ID, true)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestUpdateCredentials() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    s.giteaEndpoint.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	newDescription := "just a description"
-	newName := "new-name"
-	newToken := "new-token"
-	updateCredParams := params.UpdateGiteaCredentialsParams{
-		Description: &newDescription,
-		Name:        &newName,
-		PAT: &params.GithubPAT{
-			OAuth2Token: newToken,
-		},
-	}
-
-	updatedCreds, err := s.db.UpdateGiteaCredentials(ctx, creds.ID, updateCredParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(updatedCreds)
-	s.Require().Equal(newDescription, updatedCreds.Description)
-	s.Require().Equal(newName, updatedCreds.Name)
-}
-
-func (s *GiteaTestSuite) TestUpdateCredentialsFailsForNonExistingCredentials() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	updateCredParams := params.UpdateGiteaCredentialsParams{
-		Description: nil,
-	}
-
-	_, err := s.db.UpdateGiteaCredentials(ctx, 1, updateCredParams)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestUpdateCredentialsFailsIfCredentialsAreOwnedByNonAdminUser() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-	testUser := garmTesting.CreateGARMTestUser(ctx, "test-user5", s.db, s.T())
-	testUserCtx := auth.PopulateContext(context.Background(), testUser, nil)
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    s.giteaEndpoint.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test-creds5",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	newDescription := "new params desc"
-	updateCredParams := params.UpdateGiteaCredentialsParams{
-		Description: &newDescription,
-	}
-
-	_, err = s.db.UpdateGiteaCredentials(testUserCtx, creds.ID, updateCredParams)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestAdminUserCanUpdateAnyGiteaCredentials() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-	testUser := garmTesting.CreateGARMTestUser(ctx, "test-user5", s.db, s.T())
-	testUserCtx := auth.PopulateContext(context.Background(), testUser, nil)
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    s.giteaEndpoint.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test-creds5",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(testUserCtx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	newDescription := "another new description"
-	updateCredParams := params.UpdateGiteaCredentialsParams{
-		Description: &newDescription,
-	}
-
-	newCreds, err := s.db.UpdateGiteaCredentials(ctx, creds.ID, updateCredParams)
-	s.Require().NoError(err)
-	s.Require().Equal(newDescription, newCreds.Description)
-}
-
-func (s *GiteaTestSuite) TestDeleteCredentialsWithOrgsOrReposFails() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    s.giteaEndpoint.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test-creds5",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(repo)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-
-	err = s.db.DeleteRepository(ctx, repo.ID)
-	s.Require().NoError(err)
-
-	org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(org)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-
-	err = s.db.DeleteOrganization(ctx, org.ID)
-	s.Require().NoError(err)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().NoError(err)
-
-	_, err = s.db.GetGiteaCredentials(ctx, creds.ID, true)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestDeleteGiteaEndpointFailsWithOrgsReposOrCredentials() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	endpointParams := params.CreateGiteaEndpointParams{
-		Name:        "deleteme",
-		Description: testEndpointDescription,
-		APIBaseURL:  testAPIBaseURL,
-		BaseURL:     testBaseURL,
-	}
-
-	ep, err := s.db.CreateGiteaEndpoint(ctx, endpointParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(ep)
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    ep.Name,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test-creds5",
-		},
-	}
-
-	creds, err := s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(creds)
-
-	repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(repo)
-
-	badRequest := &runnerErrors.BadRequestError{}
-	err = s.db.DeleteGiteaEndpoint(ctx, ep.Name)
-	s.Require().Error(err)
-	s.Require().ErrorAs(err, &badRequest)
-
-	err = s.db.DeleteRepository(ctx, repo.ID)
-	s.Require().NoError(err)
-
-	org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotNil(org)
-
-	err = s.db.DeleteGiteaEndpoint(ctx, ep.Name)
-	s.Require().Error(err)
-	s.Require().ErrorAs(err, &badRequest)
-
-	err = s.db.DeleteOrganization(ctx, org.ID)
-	s.Require().NoError(err)
-
-	err = s.db.DeleteGiteaCredentials(ctx, creds.ID)
-	s.Require().NoError(err)
-
-	err = s.db.DeleteGiteaEndpoint(ctx, ep.Name)
-	s.Require().NoError(err)
-
-	_, err = s.db.GetGiteaEndpoint(ctx, ep.Name)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
-}
-
-func (s *GiteaTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	createEpParams := params.CreateGiteaEndpointParams{
-		Name:        "deleteme",
-		Description: testEndpointDescription,
-		APIBaseURL:  testAPIBaseURL,
-		BaseURL:     testBaseURL,
-	}
-
-	endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams)
-	s.Require().NoError(err)
-	s.Require().NotNil(endpoint)
-
-	credParams := params.CreateGiteaCredentialsParams{
-		Name:        testCredsName,
-		Description: testCredsDescription,
-		Endpoint:    testEndpointName,
-		AuthType:    params.ForgeAuthTypePAT,
-		PAT: params.GithubPAT{
-			OAuth2Token: "test",
-		},
-	}
-
-	_, err = s.db.CreateGiteaCredentials(ctx, credParams)
-	s.Require().NoError(err)
-
-	newDescription := "new gitea description"
-	newBaseURL := "https://new-gitea.example.com"
-	newAPIBaseURL := "https://new-gotea.example.com"
-	updateEpParams := params.UpdateGiteaEndpointParams{
-		BaseURL: &newBaseURL,
-	}
-
-	_, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-	s.Require().EqualError(err, "error updating gitea endpoint: cannot update endpoint URLs with existing credentials")
-
-	updateEpParams = params.UpdateGiteaEndpointParams{
-		APIBaseURL: &newAPIBaseURL,
-	}
-	_, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams)
-	s.Require().Error(err)
-	s.Require().ErrorIs(err, runnerErrors.ErrBadRequest)
-	s.Require().EqualError(err, "error updating gitea endpoint: cannot update endpoint URLs with existing credentials")
-
-	updateEpParams = params.UpdateGiteaEndpointParams{
-		Description: &newDescription,
-	}
-	ret, err := s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams)
-	s.Require().NoError(err)
-	s.Require().Equal(newDescription, ret.Description)
-}
-
-func (s *GiteaTestSuite) TestListGiteaEndpoints() {
-	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
-
-	createEpParams := params.CreateGiteaEndpointParams{
-		Name:        "deleteme",
-		Description: testEndpointDescription,
-		APIBaseURL:  testAPIBaseURL,
-		BaseURL:     testBaseURL,
-	}
-
-	_, err := s.db.CreateGiteaEndpoint(ctx, createEpParams)
-	s.Require().NoError(err)
-
-	endpoints, err := s.db.ListGiteaEndpoints(ctx)
-	s.Require().NoError(err)
-	s.Require().Len(endpoints, 2)
-}
-
-func TestGiteaTestSuite(t *testing.T) {
-	suite.Run(t, new(GiteaTestSuite))
-}
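
Two error-wrapping styles meet in this patch: the fmt.Errorf("...: %w", err) calls on the removed side, and the errors.Wrap(err, "...") calls from github.com/pkg/errors that stand in for them. The ErrorIs assertions in the suites above keep passing across either style because both wrappers expose Unwrap (pkg/errors does so as of v0.9.0), which is what the standard library's errors.Is walks. A minimal, self-contained sketch; the sentinel and the messages are illustrative stand-ins, not GARM's:

package main

import (
	"errors"
	"fmt"

	pkgerrors "github.com/pkg/errors"
)

// errNotFound stands in for a sentinel such as runnerErrors.ErrNotFound.
var errNotFound = errors.New("not found")

func main() {
	// Old style: stdlib wrapping via the %w verb.
	a := fmt.Errorf("fetching endpoint: %w", errNotFound)
	// New style: pkg/errors wrapping; Wrap prepends context to the message.
	b := pkgerrors.Wrap(errNotFound, "fetching endpoint")

	// Both wrappers implement Unwrap, so errors.Is finds the sentinel.
	fmt.Println(errors.Is(a, errNotFound)) // true
	fmt.Println(errors.Is(b, errNotFound)) // true

	// Error() renders "<context>: <cause>", which is what lets the tests
	// assert exact strings with s.Require().EqualError(...).
	fmt.Println(b.Error()) // fetching endpoint: not found
}
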
diff --git a/database/sql/github.go b/database/sql/github.go
index 626d138f..d787653d 100644
--- a/database/sql/github.go
+++ b/database/sql/github.go
@@ -16,18 +16,104 @@ package sql

 import (
 	"context"
-	"errors"
-	"fmt"

+	"github.com/google/uuid"
+	"github.com/pkg/errors"
 	"gorm.io/gorm"

 	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
+	"github.com/cloudbase/garm-provider-common/util"
 	"github.com/cloudbase/garm/auth"
 	"github.com/cloudbase/garm/database/common"
 	"github.com/cloudbase/garm/params"
 )

-func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) {
+func (s *sqlDatabase) sqlToCommonGithubCredentials(creds GithubCredentials) (params.GithubCredentials, error) {
+	if len(creds.Payload) == 0 {
+		return params.GithubCredentials{}, errors.New("empty credentials payload")
+	}
+	data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase))
+	if err != nil {
+		return params.GithubCredentials{}, errors.Wrap(err, "unsealing credentials")
+	}
+
+	ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint)
+	if err != nil {
+		return params.GithubCredentials{}, errors.Wrap(err, "converting github endpoint")
+	}
+
+	commonCreds := params.GithubCredentials{
+		ID:                 creds.ID,
+		Name:               creds.Name,
+		Description:        creds.Description,
+		APIBaseURL:         creds.Endpoint.APIBaseURL,
+		BaseURL:            creds.Endpoint.BaseURL,
+		UploadBaseURL:      creds.Endpoint.UploadBaseURL,
+		CABundle:           creds.Endpoint.CACertBundle,
+		AuthType:           creds.AuthType,
+		CreatedAt:          creds.CreatedAt,
+		UpdatedAt:          creds.UpdatedAt,
+		Endpoint:           ep,
+		CredentialsPayload: data,
+	}
+
+	for _, repo := range creds.Repositories {
+		commonRepo, err := s.sqlToCommonRepository(repo, false)
+		if err != nil {
+			return params.GithubCredentials{}, errors.Wrap(err, "converting github repository")
+		}
+		commonCreds.Repositories = append(commonCreds.Repositories, commonRepo)
+	}
+
+	for _, org := range creds.Organizations {
+		commonOrg, err := s.sqlToCommonOrganization(org, false)
+		if err != nil {
+			return params.GithubCredentials{}, errors.Wrap(err, "converting github organization")
+		}
+		commonCreds.Organizations = append(commonCreds.Organizations, commonOrg)
+	}
+
+	for _, ent := range creds.Enterprises {
+		commonEnt, err := s.sqlToCommonEnterprise(ent, false)
+		if err != nil {
+			return params.GithubCredentials{}, errors.Wrapf(err, "converting github enterprise: %s", ent.Name)
+		}
+		commonCreds.Enterprises = append(commonCreds.Enterprises, commonEnt)
+	}
+
+	return commonCreds, nil
+}
+
+func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.GithubEndpoint, error) {
+	return params.GithubEndpoint{
+		Name:          ep.Name,
+		Description:   ep.Description,
+		APIBaseURL:    ep.APIBaseURL,
+		BaseURL:       ep.BaseURL,
+		UploadBaseURL: ep.UploadBaseURL,
+		CACertBundle:  ep.CACertBundle,
+		CreatedAt:     ep.CreatedAt,
+		UpdatedAt:     ep.UpdatedAt,
+	}, nil
+}
+
+func getUIDFromContext(ctx context.Context) (uuid.UUID, error) {
+	userID := auth.UserID(ctx)
+	if userID == "" {
+		return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "getting UID from context")
+	}
+
+	asUUID, err := uuid.Parse(userID)
+	if err != nil {
+		return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "parsing UID from context")
+	}
+	return asUUID, nil
+}
+
+func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.GithubEndpointEntityType, common.CreateOperation, ghEndpoint)
@@ -36,7 +122,7 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat
 	var endpoint GithubEndpoint
 	err = s.conn.Transaction(func(tx *gorm.DB) error {
 		if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil {
-			return fmt.Errorf("error github endpoint already exists: %w", runnerErrors.ErrDuplicateEntity)
+			return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github endpoint already exists")
 		}
 		endpoint = GithubEndpoint{
 			Name:          param.Name,
@@ -45,43 +131,45 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat
 			BaseURL:       param.BaseURL,
 			UploadBaseURL: param.UploadBaseURL,
 			CACertBundle:  param.CACertBundle,
-			EndpointType:  params.GithubEndpointType,
 		}

 		if err := tx.Create(&endpoint).Error; err != nil {
-			return fmt.Errorf("error creating github endpoint: %w", err)
+			return errors.Wrap(err, "creating github endpoint")
 		}
 		return nil
 	})
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error creating github endpoint: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "creating github endpoint")
 	}

 	ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint)
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error converting github endpoint: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "converting github endpoint")
 	}
 	return ghEndpoint, nil
 }

-func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) {
+func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.GithubEndpoint, error) {
 	var endpoints []GithubEndpoint
-	err := s.conn.Where("endpoint_type = ?", params.GithubEndpointType).Find(&endpoints).Error
+	err := s.conn.Find(&endpoints).Error
 	if err != nil {
-		return nil, fmt.Errorf("error fetching github endpoints: %w", err)
+		return nil, errors.Wrap(err, "fetching github endpoints")
 	}

-	var ret []params.ForgeEndpoint
+	var ret []params.GithubEndpoint
 	for _, ep := range endpoints {
 		commonEp, err := s.sqlToCommonGithubEndpoint(ep)
 		if err != nil {
-			return nil, fmt.Errorf("error converting github endpoint: %w", err)
+			return nil, errors.Wrap(err, "converting github endpoint")
 		}
 		ret = append(ret, commonEp)
 	}
 	return ret, nil
 }

-func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) {
+func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint)
@@ -89,21 +177,11 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param
 	}()
 	var endpoint GithubEndpoint
 	err = s.conn.Transaction(func(tx *gorm.DB) error {
-		if err := tx.Where("name = ? and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error; err != nil {
+		if err := tx.Where("name = ?", name).First(&endpoint).Error; err != nil {
 			if errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error github endpoint not found: %w", runnerErrors.ErrNotFound)
+				return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found")
 			}
-			return fmt.Errorf("error fetching github endpoint: %w", err)
-		}
-
-		var credsCount int64
-		if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil {
-			if !errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error fetching github credentials: %w", err)
-			}
-		}
-		if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil || param.UploadBaseURL != nil) {
-			return fmt.Errorf("cannot update endpoint URLs with existing credentials: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(err, "fetching github endpoint")
 		}

 		if param.APIBaseURL != nil {
@@ -127,100 +205,106 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param
 		}

 		if err := tx.Save(&endpoint).Error; err != nil {
-			return fmt.Errorf("error updating github endpoint: %w", err)
+			return errors.Wrap(err, "updating github endpoint")
 		}
 		return nil
 	})
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error updating github endpoint: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "updating github endpoint")
 	}

 	ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint)
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error converting github endpoint: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "converting github endpoint")
 	}
 	return ghEndpoint, nil
 }

-func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) {
+func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params.GithubEndpoint, error) {
 	var endpoint GithubEndpoint
-	err := s.conn.Where("name = ? and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error
+	err := s.conn.Where("name = ?", name).First(&endpoint).Error
 	if err != nil {
 		if errors.Is(err, gorm.ErrRecordNotFound) {
-			return params.ForgeEndpoint{}, fmt.Errorf("github endpoint not found: %w", runnerErrors.ErrNotFound)
+			return params.GithubEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found")
 		}
-		return params.ForgeEndpoint{}, fmt.Errorf("error fetching github endpoint: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "fetching github endpoint")
 	}

 	return s.sqlToCommonGithubEndpoint(endpoint)
 }

 func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
-			s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name})
+			s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.GithubEndpoint{Name: name})
 		}
 	}()

 	err = s.conn.Transaction(func(tx *gorm.DB) error {
 		var endpoint GithubEndpoint
-		if err := tx.Where("name = ? and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error; err != nil {
+		if err := tx.Where("name = ?", name).First(&endpoint).Error; err != nil {
 			if errors.Is(err, gorm.ErrRecordNotFound) {
 				return nil
 			}
-			return fmt.Errorf("error fetching github endpoint: %w", err)
+			return errors.Wrap(err, "fetching github endpoint")
 		}

 		var credsCount int64
 		if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil {
 			if !errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error fetching github credentials: %w", err)
+				return errors.Wrap(err, "fetching github credentials")
 			}
 		}

 		var repoCnt int64
 		if err := tx.Model(&Repository{}).Where("endpoint_name = ?", endpoint.Name).Count(&repoCnt).Error; err != nil {
 			if !errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error fetching github repositories: %w", err)
+				return errors.Wrap(err, "fetching github repositories")
 			}
 		}

 		var orgCnt int64
 		if err := tx.Model(&Organization{}).Where("endpoint_name = ?", endpoint.Name).Count(&orgCnt).Error; err != nil {
 			if !errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error fetching github organizations: %w", err)
+				return errors.Wrap(err, "fetching github organizations")
 			}
 		}

 		var entCnt int64
 		if err := tx.Model(&Enterprise{}).Where("endpoint_name = ?", endpoint.Name).Count(&entCnt).Error; err != nil {
 			if !errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("error fetching github enterprises: %w", err)
+				return errors.Wrap(err, "fetching github enterprises")
 			}
 		}

 		if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 {
-			return fmt.Errorf("cannot delete endpoint with associated entities: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities")
 		}

 		if err := tx.Unscoped().Delete(&endpoint).Error; err != nil {
-			return fmt.Errorf("error deleting github endpoint: %w", err)
+			return errors.Wrap(err, "deleting github endpoint")
 		}
 		return nil
 	})
 	if err != nil {
-		return fmt.Errorf("error deleting github endpoint: %w", err)
+		return errors.Wrap(err, "deleting github endpoint")
 	}
 	return nil
 }

-func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.ForgeCredentials, err error) {
+func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	userID, err := getUIDFromContext(ctx)
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error creating github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "creating github credentials")
 	}
 	if param.Endpoint == "" {
-		return params.ForgeCredentials{}, fmt.Errorf("endpoint name is required: %w", runnerErrors.ErrBadRequest)
+		return params.GithubCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required")
 	}

 	defer func() {
@@ -231,29 +315,29 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.
 	var creds GithubCredentials
 	err = s.conn.Transaction(func(tx *gorm.DB) error {
 		var endpoint GithubEndpoint
-		if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GithubEndpointType).First(&endpoint).Error; err != nil {
+		if err := tx.Where("name = ?", param.Endpoint).First(&endpoint).Error; err != nil {
 			if errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("github endpoint not found: %w", runnerErrors.ErrNotFound)
+				return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found")
 			}
-			return fmt.Errorf("error fetching github endpoint: %w", err)
+			return errors.Wrap(err, "fetching github endpoint")
 		}

 		if err := tx.Where("name = ? and user_id = ?", param.Name, userID).First(&creds).Error; err == nil {
-			return fmt.Errorf("github credentials already exists: %w", runnerErrors.ErrDuplicateEntity)
+			return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github credentials already exists")
 		}

 		var data []byte
 		var err error
 		switch param.AuthType {
-		case params.ForgeAuthTypePAT:
+		case params.GithubAuthTypePAT:
 			data, err = s.marshalAndSeal(param.PAT)
-		case params.ForgeAuthTypeApp:
+		case params.GithubAuthTypeApp:
 			data, err = s.marshalAndSeal(param.App)
 		default:
-			return fmt.Errorf("invalid auth type: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type")
 		}
 		if err != nil {
-			return fmt.Errorf("error marshaling and sealing credentials: %w", err)
+			return errors.Wrap(err, "marshaling and sealing credentials")
 		}

 		creds = GithubCredentials{
@@ -266,7 +350,7 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.
 		}

 		if err := tx.Create(&creds).Error; err != nil {
-			return fmt.Errorf("error creating github credentials: %w", err)
+			return errors.Wrap(err, "creating github credentials")
 		}
 		// Skip making an extra query.
 		creds.Endpoint = endpoint
@@ -274,11 +358,11 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.
 		return nil
 	})
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error creating github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "creating github credentials")
 	}

-	ghCreds, err = s.sqlToCommonForgeCredentials(creds)
+	ghCreds, err = s.sqlToCommonGithubCredentials(creds)
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error converting github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "converting github credentials")
 	}
 	return ghCreds, nil
 }

@@ -290,56 +374,51 @@ func (s *sqlDatabase) getGithubCredentialsByName(ctx context.Context, tx *gorm.D
 	if detailed {
 		q = q.
 			Preload("Repositories").
-			Preload("Repositories.Credentials").
 			Preload("Organizations").
-			Preload("Organizations.Credentials").
-			Preload("Enterprises").
-			Preload("Enterprises.Credentials")
+			Preload("Enterprises")
 	}

 	userID, err := getUIDFromContext(ctx)
 	if err != nil {
-		return GithubCredentials{}, fmt.Errorf("error fetching github credentials: %w", err)
+		return GithubCredentials{}, errors.Wrap(err, "fetching github credentials")
 	}
 	q = q.Where("user_id = ?", userID)

 	err = q.Where("name = ?", name).First(&creds).Error
 	if err != nil {
 		if errors.Is(err, gorm.ErrRecordNotFound) {
-			return GithubCredentials{}, fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound)
+			return GithubCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found")
 		}
-		return GithubCredentials{}, fmt.Errorf("error fetching github credentials: %w", err)
+		return GithubCredentials{}, errors.Wrap(err, "fetching github credentials")
 	}

 	return creds, nil
 }

-func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) {
+func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) {
 	creds, err := s.getGithubCredentialsByName(ctx, s.conn, name, detailed)
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "fetching github credentials")
 	}
-	return s.sqlToCommonForgeCredentials(creds)
+
+	return s.sqlToCommonGithubCredentials(creds)
 }

-func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) {
+func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) {
 	var creds GithubCredentials
 	q := s.conn.Preload("Endpoint")
 	if detailed {
 		q = q.
 			Preload("Repositories").
-			Preload("Repositories.Credentials").
 			Preload("Organizations").
-			Preload("Organizations.Credentials").
-			Preload("Enterprises").
-			Preload("Enterprises.Credentials")
+			Preload("Enterprises")
 	}

 	if !auth.IsAdmin(ctx) {
 		userID, err := getUIDFromContext(ctx)
 		if err != nil {
-			return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err)
+			return params.GithubCredentials{}, errors.Wrap(err, "fetching github credentials")
 		}
 		q = q.Where("user_id = ?", userID)
 	}
@@ -347,20 +426,20 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile
 	err := q.Where("id = ?", id).First(&creds).Error
 	if err != nil {
 		if errors.Is(err, gorm.ErrRecordNotFound) {
-			return params.ForgeCredentials{}, fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound)
+			return params.GithubCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found")
 		}
-		return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "fetching github credentials")
 	}

-	return s.sqlToCommonForgeCredentials(creds)
+	return s.sqlToCommonGithubCredentials(creds)
 }

-func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) {
+func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) {
 	q := s.conn.Preload("Endpoint")
 	if !auth.IsAdmin(ctx) {
 		userID, err := getUIDFromContext(ctx)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching github credentials: %w", err)
+			return nil, errors.Wrap(err, "fetching github credentials")
 		}
 		q = q.Where("user_id = ?", userID)
 	}
@@ -368,21 +447,24 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Forge
 	var creds []GithubCredentials
 	err := q.Preload("Endpoint").Find(&creds).Error
 	if err != nil {
-		return nil, fmt.Errorf("error fetching github credentials: %w", err)
+		return nil, errors.Wrap(err, "fetching github credentials")
 	}

-	var ret []params.ForgeCredentials
+	var ret []params.GithubCredentials
 	for _, c := range creds {
-		commonCreds, err := s.sqlToCommonForgeCredentials(c)
+		commonCreds, err := s.sqlToCommonGithubCredentials(c)
 		if err != nil {
-			return nil, fmt.Errorf("error converting github credentials: %w", err)
+			return nil, errors.Wrap(err, "converting github credentials")
 		}
 		ret = append(ret, commonCreds)
 	}
 	return ret, nil
 }

-func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (ghCreds params.ForgeCredentials, err error) {
+func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.GithubCredentialsEntityType, common.UpdateOperation, ghCreds)
@@ -394,16 +476,16 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para
 		if !auth.IsAdmin(ctx) {
 			userID, err := getUIDFromContext(ctx)
 			if err != nil {
-				return fmt.Errorf("error updating github credentials: %w", err)
+				return errors.Wrap(err, "updating github credentials")
 			}
 			q = q.Where("user_id = ?", userID)
 		}

 		if err := q.Where("id = ?", id).First(&creds).Error; err != nil {
 			if errors.Is(err, gorm.ErrRecordNotFound) {
-				return fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound)
+				return errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found")
 			}
-			return fmt.Errorf("error fetching github credentials: %w", err)
+			return errors.Wrap(err, "fetching github credentials")
 		}

 		if param.Name != nil {
@@ -416,56 +498,59 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para
 		var data []byte
 		var err error
 		switch creds.AuthType {
-		case params.ForgeAuthTypePAT:
+		case params.GithubAuthTypePAT:
 			if param.PAT != nil {
 				data, err = s.marshalAndSeal(param.PAT)
 			}
 			if param.App != nil {
-				return fmt.Errorf("cannot update app credentials for PAT: %w", runnerErrors.ErrBadRequest)
+				return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update app credentials for PAT")
 			}
-		case params.ForgeAuthTypeApp:
+		case params.GithubAuthTypeApp:
 			if param.App != nil {
 				data, err = s.marshalAndSeal(param.App)
 			}
 			if param.PAT != nil {
-				return fmt.Errorf("cannot update PAT credentials for app: %w", runnerErrors.ErrBadRequest)
+				return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update PAT credentials for app")
 			}
 		default:
 			// This should never happen, unless there was a bug in the DB migration code,
 			// or the DB was manually modified.
-			return fmt.Errorf("invalid auth type: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type")
 		}
 		if err != nil {
-			return fmt.Errorf("error marshaling and sealing credentials: %w", err)
+			return errors.Wrap(err, "marshaling and sealing credentials")
 		}

 		if len(data) > 0 {
 			creds.Payload = data
 		}

 		if err := tx.Save(&creds).Error; err != nil {
-			return fmt.Errorf("error updating github credentials: %w", err)
+			return errors.Wrap(err, "updating github credentials")
 		}
 		return nil
 	})
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error updating github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "updating github credentials")
 	}

-	ghCreds, err = s.sqlToCommonForgeCredentials(creds)
+	ghCreds, err = s.sqlToCommonGithubCredentials(creds)
 	if err != nil {
-		return params.ForgeCredentials{}, fmt.Errorf("error converting github credentials: %w", err)
+		return params.GithubCredentials{}, errors.Wrap(err, "converting github credentials")
 	}
 	return ghCreds, nil
 }

 func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	var name string
 	defer func() {
 		if err == nil {
-			s.sendNotify(common.GithubCredentialsEntityType, common.DeleteOperation, params.ForgeCredentials{ID: id, Name: name})
+			s.sendNotify(common.GithubCredentialsEntityType, common.DeleteOperation, params.GithubCredentials{ID: id, Name: name})
 		}
 	}()

 	err = s.conn.Transaction(func(tx *gorm.DB) error {
@@ -476,7 +561,7 @@ func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err
 		if !auth.IsAdmin(ctx) {
 			userID, err := getUIDFromContext(ctx)
 			if err != nil {
-				return fmt.Errorf("error deleting github credentials: %w", err)
+				return errors.Wrap(err, "deleting github credentials")
 			}
 			q = q.Where("user_id = ?", userID)
 		}
@@ -487,27 +572,27 @@ func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err
 			if errors.Is(err, gorm.ErrRecordNotFound) {
 				return nil
 			}
-			return fmt.Errorf("error fetching github credentials: %w", err)
+			return errors.Wrap(err, "fetching github credentials")
 		}
 		name = creds.Name

 		if len(creds.Repositories) > 0 {
-			return fmt.Errorf("cannot delete credentials with repositories: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with repositories")
 		}
 		if len(creds.Organizations) > 0 {
-			return fmt.Errorf("cannot delete credentials with organizations: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with organizations")
 		}
 		if len(creds.Enterprises) > 0 {
-			return fmt.Errorf("cannot delete credentials with enterprises: %w", runnerErrors.ErrBadRequest)
+			return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with enterprises")
 		}

 		if err := tx.Unscoped().Delete(&creds).Error; err != nil {
-			return fmt.Errorf("error deleting github credentials: %w", err)
+			return errors.Wrap(err, "deleting github credentials")
 		}
 		return nil
 	})
 	if err != nil {
-		return fmt.Errorf("error deleting github credentials: %w", err)
+		return errors.Wrap(err, "deleting github credentials")
 	}
 	return nil
 }
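
A recurring shape in the store methods above: the function declares a named error return, and a deferred closure checks it after the body has run, emitting a change notification only when the operation succeeded. A minimal sketch of the pattern; store, sendNotify, and CreateThing are invented names for illustration, not GARM's API:

package main

import "fmt"

type store struct{}

func (s *store) sendNotify(op string, payload any) {
	fmt.Printf("notify: %s %v\n", op, payload)
}

// CreateThing uses named return values so the deferred closure can observe
// the final err value and notify watchers only on success.
func (s *store) CreateThing(name string) (thing string, err error) {
	defer func() {
		if err == nil {
			s.sendNotify("create", thing)
		}
	}()

	if name == "" {
		return "", fmt.Errorf("name is required")
	}
	thing = "thing-" + name
	return thing, nil
}

func main() {
	if _, err := (&store{}).CreateThing("demo"); err != nil {
		fmt.Println("error:", err)
	}
}

Because deferred functions run after the named return values are assigned, a failed transaction never emits a spurious event.
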
description" - newBaseURL := "https://new.example.com" - newAPIBaseURL := "https://new-api.example.com" - newUploadBaseURL := "https://new-uploads.example.com" - updateEpParams := params.UpdateGithubEndpointParams{ - BaseURL: &newBaseURL, - } - - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - UploadBaseURL: &newUploadBaseURL, - } - - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - APIBaseURL: &newAPIBaseURL, - } - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - Description: &newDescription, - } - ret, err := s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().NoError(err) - s.Require().Equal(newDescription, ret.Description) -} - func (s *GithubTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) @@ -366,7 +293,7 @@ func (s *GithubTestSuite) TestCreateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -390,7 +317,7 @@ func (s *GithubTestSuite) TestCreateCredentialsFailsOnDuplicateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -420,7 +347,7 @@ func (s *GithubTestSuite) TestNormalUsersCanOnlySeeTheirOwnCredentialsAdminCanSe Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -476,7 +403,7 @@ func (s *GithubTestSuite) TestGetGithubCredentialsByNameReturnsOnlyCurrentUserCr Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -521,7 +448,7 @@ func (s *GithubTestSuite) TestGetGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -551,7 +478,7 @@ func (s *GithubTestSuite) TestDeleteGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -578,7 +505,7 @@ func (s 
*GithubTestSuite) TestDeleteGithubCredentialsByNonAdminUser() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds4", }, @@ -623,7 +550,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -633,7 +560,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() s.Require().NoError(err) s.Require().NotNil(creds) - repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(repo) @@ -644,7 +571,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() err = s.db.DeleteRepository(ctx, repo.ID) s.Require().NoError(err) - org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + org, err := s.db.CreateOrganization(ctx, "test-org", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(org) @@ -655,7 +582,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() err = s.db.DeleteOrganization(ctx, org.ID) s.Require().NoError(err) - enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(enterprise) @@ -681,7 +608,7 @@ func (s *GithubTestSuite) TestUpdateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -716,7 +643,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -737,13 +664,13 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs _, err = s.db.UpdateGithubCredentials(ctx, creds.ID, updateCredParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "error updating github credentials: cannot update app credentials for PAT: invalid request") + s.Require().EqualError(err, "updating github credentials: cannot update app credentials for PAT: invalid request") credParamsWithApp := params.CreateGithubCredentialsParams{ Name: "test-credsApp", Description: "test credsApp", Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypeApp, + AuthType: params.GithubAuthTypeApp, App: params.GithubApp{ AppID: 1, InstallationID: 2, @@ -764,7 +691,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs _, err = s.db.UpdateGithubCredentials(ctx, credsApp.ID, 
updateCredParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "error updating github credentials: cannot update PAT credentials for app: invalid request") + s.Require().EqualError(err, "updating github credentials: cannot update PAT credentials for app: invalid request") } func (s *GithubTestSuite) TestUpdateCredentialsFailsForNonExistingCredentials() { @@ -788,7 +715,7 @@ func (s *GithubTestSuite) TestUpdateCredentialsFailsIfCredentialsAreOwnedByNonAd Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds5", }, @@ -817,7 +744,7 @@ func (s *GithubTestSuite) TestAdminUserCanUpdateAnyGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds5", }, @@ -837,69 +764,8 @@ func (s *GithubTestSuite) TestAdminUserCanUpdateAnyGithubCredentials() { s.Require().Equal(newDescription, newCreds.Description) } -func (s *GithubTestSuite) TestDeleteGithubEndpointFailsWithOrgsReposOrCredentials() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - endpointParams := params.CreateGithubEndpointParams{ - Name: "deleteme", - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - BaseURL: testBaseURL, - } - - ep, err := s.db.CreateGithubEndpoint(ctx, endpointParams) - s.Require().NoError(err) - s.Require().NotNil(ep) - - credParams := params.CreateGithubCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: ep.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test-creds5", - }, - } - - creds, err := s.db.CreateGithubCredentials(ctx, credParams) - s.Require().NoError(err) - s.Require().NotNil(creds) - - repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - s.Require().NotNil(repo) - - badRequest := &runnerErrors.BadRequestError{} - err = s.db.DeleteGithubEndpoint(ctx, ep.Name) - s.Require().Error(err) - s.Require().ErrorAs(err, &badRequest) - - err = s.db.DeleteRepository(ctx, repo.ID) - s.Require().NoError(err) - - org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - s.Require().NotNil(org) - - err = s.db.DeleteGithubEndpoint(ctx, ep.Name) - s.Require().Error(err) - s.Require().ErrorAs(err, &badRequest) - - err = s.db.DeleteOrganization(ctx, org.ID) - s.Require().NoError(err) - - err = s.db.DeleteGithubCredentials(ctx, creds.ID) - s.Require().NoError(err) - - err = s.db.DeleteGithubEndpoint(ctx, ep.Name) - s.Require().NoError(err) - - _, err = s.db.GetGithubEndpoint(ctx, ep.Name) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrNotFound) -} - func TestGithubTestSuite(t *testing.T) { + t.Parallel() suite.Run(t, new(GithubTestSuite)) } @@ -998,10 +864,10 @@ func TestCredentialsAndEndpointMigration(t *testing.T) { t.Fatalf("expected ghes-test to be associated with example.com endpoint, got %s", creds[1].Endpoint.Name) } - if creds[0].AuthType != params.ForgeAuthTypePAT { + if creds[0].AuthType != params.GithubAuthTypePAT { t.Fatalf("expected test-creds to have PAT auth type, got %s", 
creds[0].AuthType) } - if creds[1].AuthType != params.ForgeAuthTypeApp { + if creds[1].AuthType != params.GithubAuthTypeApp { t.Fatalf("expected ghes-test to have App auth type, got %s", creds[1].AuthType) } if len(creds[0].CredentialsPayload) == 0 { diff --git a/database/sql/instances.go b/database/sql/instances.go index 5f9d018e..c7fb02f6 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -17,11 +17,10 @@ package sql import ( "context" "encoding/json" - "errors" - "fmt" "log/slog" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" "gorm.io/gorm/clause" @@ -32,9 +31,12 @@ import ( ) func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param params.CreateInstanceParams) (instance params.Instance, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return params.Instance{}, fmt.Errorf("error fetching pool: %w", err) + return params.Instance{}, errors.Wrap(err, "fetching pool") } defer func() { @@ -47,7 +49,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par if len(param.AditionalLabels) > 0 { labels, err = json.Marshal(param.AditionalLabels) if err != nil { - return params.Instance{}, fmt.Errorf("error marshalling labels: %w", err) + return params.Instance{}, errors.Wrap(err, "marshalling labels") } } @@ -55,7 +57,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par if len(param.JitConfiguration) > 0 { secret, err = s.marshalAndSeal(param.JitConfiguration) if err != nil { - return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) + return params.Instance{}, errors.Wrap(err, "marshalling jit config") } } @@ -75,7 +77,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par } q := s.conn.Create(&newInstance) if q.Error != nil { - return params.Instance{}, fmt.Errorf("error creating instance: %w", q.Error) + return params.Instance{}, errors.Wrap(q.Error, "creating instance") } return s.sqlToParamsInstance(newInstance) @@ -84,7 +86,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string) (Instance, error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return Instance{}, fmt.Errorf("error fetching pool: %w", err) + return Instance{}, errors.Wrap(err, "fetching pool") } var instance Instance @@ -94,25 +96,16 @@ func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string) First(&instance) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Instance{}, fmt.Errorf("error fetching pool instance by name: %w", runnerErrors.ErrNotFound) + return Instance{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching pool instance by name") } - return Instance{}, fmt.Errorf("error fetching pool instance by name: %w", q.Error) + return Instance{}, errors.Wrap(q.Error, "fetching pool instance by name") } - - instance.Pool = pool return instance, nil } -func (s *sqlDatabase) getInstance(_ context.Context, instanceNameOrID string, preload ...string) (Instance, error) { +func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, preload ...string) (Instance, error) { var instance Instance - var whereArg any = instanceNameOrID - whereClause := "name = ?" - id, err := uuid.Parse(instanceNameOrID) - if err == nil { - whereArg = id - whereClause = "id = ?" 
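
The s.writeMux.Lock() / defer s.writeMux.Unlock() pairs added to every mutating method funnel all writes through a single mutex. Serializing writers like this is a common guard for engines that allow only one concurrent writer at a time (SQLite is the usual case; the patch does not state the motivation, so treat that rationale as an assumption). A condensed sketch of the shape, with invented names:

package main

import (
	"fmt"
	"sync"
)

// sqlDatabase mirrors the shape used in the patch: one mutex guards all
// writes while reads can go through unguarded query paths.
type sqlDatabase struct {
	writeMux sync.Mutex
	rows     map[string]string
}

func (s *sqlDatabase) CreateInstance(name string) error {
	s.writeMux.Lock()
	defer s.writeMux.Unlock()

	if _, ok := s.rows[name]; ok {
		return fmt.Errorf("instance %q already exists", name)
	}
	s.rows[name] = "running"
	return nil
}

func main() {
	db := &sqlDatabase{rows: map[string]string{}}

	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			// Concurrent writers are serialized by writeMux, so the map
			// is never mutated by two goroutines at once.
			_ = db.CreateInstance(fmt.Sprintf("runner-%d", i))
		}(i)
	}
	wg.Wait()
	fmt.Println(len(db.rows), "instances created")
}
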
- } q := s.conn if len(preload) > 0 { @@ -123,33 +116,42 @@ func (s *sqlDatabase) getInstance(_ context.Context, instanceNameOrID string, pr q = q.Model(&Instance{}). Preload(clause.Associations). - Where(whereClause, whereArg). + Where("name = ?", instanceName). First(&instance) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Instance{}, fmt.Errorf("error fetching instance by name: %w", runnerErrors.ErrNotFound) + return Instance{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching instance by name") } - return Instance{}, fmt.Errorf("error fetching instance by name: %w", q.Error) + return Instance{}, errors.Wrap(q.Error, "fetching instance by name") } return instance, nil } -func (s *sqlDatabase) GetInstance(ctx context.Context, instanceName string) (params.Instance, error) { - instance, err := s.getInstance(ctx, instanceName, "StatusMessages", "Pool", "ScaleSet") +func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, instanceName string) (params.Instance, error) { + instance, err := s.getPoolInstanceByName(poolID, instanceName) if err != nil { - return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) + return params.Instance{}, errors.Wrap(err, "fetching instance") + } + + return s.sqlToParamsInstance(instance) +} + +func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { + instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages") + if err != nil { + return params.Instance{}, errors.Wrap(err, "fetching instance") } return s.sqlToParamsInstance(instance) } func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceName string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + instance, err := s.getPoolInstanceByName(poolID, instanceName) if err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - return nil - } - return fmt.Errorf("error deleting instance: %w", err) + return errors.Wrap(err, "deleting instance") } defer func() { @@ -158,20 +160,13 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN if instance.ProviderID != nil { providerID = *instance.ProviderID } - instanceNotif := params.Instance{ + if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, params.Instance{ ID: instance.ID.String(), Name: instance.Name, ProviderID: providerID, AgentID: instance.AgentID, - } - switch { - case instance.PoolID != nil: - instanceNotif.PoolID = instance.PoolID.String() - case instance.ScaleSetFkID != nil: - instanceNotif.ScaleSetID = *instance.ScaleSetFkID - } - - if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, instanceNotif); notifyErr != nil { + PoolID: instance.PoolID.String(), + }); notifyErr != nil { slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") } } @@ -181,57 +176,18 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return fmt.Errorf("error deleting instance: %w", q.Error) - } - return nil -} - -func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName string) error { - instance, err := s.getInstance(ctx, instanceName, "Pool", "ScaleSet") - if err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - return nil - } - return fmt.Errorf("error deleting instance: %w", err) - } - - defer func() { - if err == nil { - var providerID string - if instance.ProviderID != nil { - providerID = 
*instance.ProviderID - } - payload := params.Instance{ - ID: instance.ID.String(), - Name: instance.Name, - ProviderID: providerID, - AgentID: instance.AgentID, - } - if instance.PoolID != nil { - payload.PoolID = instance.PoolID.String() - } - if instance.ScaleSetFkID != nil { - payload.ScaleSetID = *instance.ScaleSetFkID - } - if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, payload); notifyErr != nil { - slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") - } - } - }() - - if q := s.conn.Unscoped().Delete(&instance); q.Error != nil { - if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return nil - } - return fmt.Errorf("error deleting instance: %w", q.Error) + return errors.Wrap(q.Error, "deleting instance") } return nil } func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, statusMessage string) error { - instance, err := s.getInstance(ctx, instanceName) + s.writeMux.Lock() + defer s.writeMux.Unlock() + + instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { - return fmt.Errorf("error updating instance: %w", err) + return errors.Wrap(err, "updating instance") } msg := InstanceStatusUpdate{ @@ -241,15 +197,18 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, } if err := s.conn.Model(&instance).Association("StatusMessages").Append(&msg); err != nil { - return fmt.Errorf("error adding status message: %w", err) + return errors.Wrap(err, "adding status message") } return nil } func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { - instance, err := s.getInstance(ctx, instanceName, "Pool", "ScaleSet") + s.writeMux.Lock() + defer s.writeMux.Unlock() + + instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { - return params.Instance{}, fmt.Errorf("error updating instance: %w", err) + return params.Instance{}, errors.Wrap(err, "updating instance") } if param.AgentID != 0 { @@ -286,7 +245,7 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p if param.JitConfiguration != nil { secret, err := s.marshalAndSeal(param.JitConfiguration) if err != nil { - return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) + return params.Instance{}, errors.Wrap(err, "marshalling jit config") } instance.JitConfiguration = secret } @@ -295,7 +254,7 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p q := s.conn.Save(&instance) if q.Error != nil { - return params.Instance{}, fmt.Errorf("error updating instance: %w", q.Error) + return params.Instance{}, errors.Wrap(q.Error, "updating instance") } if len(param.Addresses) > 0 { @@ -307,12 +266,12 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p }) } if err := s.conn.Model(&instance).Association("Addresses").Replace(addrs); err != nil { - return params.Instance{}, fmt.Errorf("error updating addresses: %w", err) + return params.Instance{}, errors.Wrap(err, "updating addresses") } } inst, err := s.sqlToParamsInstance(instance) if err != nil { - return params.Instance{}, fmt.Errorf("error converting instance: %w", err) + return params.Instance{}, errors.Wrap(err, "converting instance") } s.sendNotify(common.InstanceEntityType, common.UpdateOperation, inst) return inst, nil @@ -321,24 +280,21 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, 
p func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]params.Instance, error) { u, err := uuid.Parse(poolID) if err != nil { - return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) + return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var instances []Instance - query := s.conn. - Preload("Pool"). - Preload("Job"). - Where("pool_id = ?", u) + query := s.conn.Model(&Instance{}).Preload("Job").Where("pool_id = ?", u) if err := query.Find(&instances); err.Error != nil { - return nil, fmt.Errorf("error fetching instances: %w", err.Error) + return nil, errors.Wrap(err.Error, "fetching instances") } ret := make([]params.Instance, len(instances)) for idx, inst := range instances { ret[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return nil, fmt.Errorf("error converting instance: %w", err) + return nil, errors.Wrap(err, "converting instance") } } return ret, nil @@ -347,20 +303,16 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) { var instances []Instance - q := s.conn. - Preload("Pool"). - Preload("ScaleSet"). - Preload("Job"). - Find(&instances) + q := s.conn.Model(&Instance{}).Preload("Job").Find(&instances) if q.Error != nil { - return nil, fmt.Errorf("error fetching instances: %w", q.Error) + return nil, errors.Wrap(q.Error, "fetching instances") } ret := make([]params.Instance, len(instances)) var err error for idx, instance := range instances { ret[idx], err = s.sqlToParamsInstance(instance) if err != nil { - return nil, fmt.Errorf("error converting instance: %w", err) + return nil, errors.Wrap(err, "converting instance") } } return ret, nil @@ -369,13 +321,13 @@ func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, er func (s *sqlDatabase) PoolInstanceCount(_ context.Context, poolID string) (int64, error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return 0, fmt.Errorf("error fetching pool: %w", err) + return 0, errors.Wrap(err, "fetching pool") } var cnt int64 q := s.conn.Model(&Instance{}).Where("pool_id = ?", pool.ID).Count(&cnt) if q.Error != nil { - return 0, fmt.Errorf("error fetching instance count: %w", q.Error) + return 0, errors.Wrap(q.Error, "fetching instance count") } return cnt, nil } diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index 5ec55107..de37033d 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -84,7 +84,7 @@ func (s *InstancesTestSuite) SetupTest() { creds := garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) // create an organization for testing purposes - org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } @@ -119,12 +119,6 @@ func (s *InstancesTestSuite) SetupTest() { CallbackURL: "https://garm.example.com/", Status: commonParams.InstanceRunning, RunnerStatus: params.RunnerIdle, - JitConfiguration: map[string]string{ - "secret": fmt.Sprintf("secret-%d", i), - }, - AditionalLabels: []string{ - fmt.Sprintf("label-%d", i), - }, }, ) if err != nil { @@ -196,7 +190,7 @@ func (s *InstancesTestSuite) TestCreateInstance() { // assertions s.Require().Nil(err) 
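Aside on the error-wrapping convention adopted throughout this patch (visible in the hunks above and in the rewritten test assertions below): fmt.Errorf("error ...: %w", err) becomes errors.Wrap(err, "...") from github.com/pkg/errors, dropping the redundant "error " prefix from every message. A minimal, self-contained sketch, with an illustrative sentinel standing in for runnerErrors.ErrNotFound, showing that stdlib errors.Is still matches through the wrap, assuming pkg/errors v0.9 or later (which implements Unwrap):

package main

import (
	stderrors "errors"
	"fmt"

	"github.com/pkg/errors"
)

// errNotFound stands in for runnerErrors.ErrNotFound; the name is illustrative.
var errNotFound = stderrors.New("not found")

func fetchInstance() error {
	// errors.Wrap prefixes "fetching instance: " to the cause and keeps
	// the chain intact for stdlib errors.Is / errors.As.
	return errors.Wrap(errNotFound, "fetching instance")
}

func main() {
	err := fetchInstance()
	fmt.Println(err)                            // fetching instance: not found
	fmt.Println(stderrors.Is(err, errNotFound)) // true
}

Nested wraps concatenate left to right, which is why the expected strings in the tests read as chains of short prefixes such as "fetching pool: parsing id: invalid request".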
- storeInstance, err := s.Store.GetInstance(s.adminCtx, s.Fixtures.CreateInstanceParams.Name) + storeInstance, err := s.Store.GetInstanceByName(s.adminCtx, s.Fixtures.CreateInstanceParams.Name) if err != nil { s.FailNow(fmt.Sprintf("failed to get instance: %v", err)) } @@ -210,7 +204,7 @@ func (s *InstancesTestSuite) TestCreateInstance() { func (s *InstancesTestSuite) TestCreateInstanceInvalidPoolID() { _, err := s.Store.CreateInstance(s.adminCtx, "dummy-pool-id", params.CreateInstanceParams{}) - s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestCreateInstanceDBCreateErr() { @@ -233,13 +227,32 @@ func (s *InstancesTestSuite) TestCreateInstanceDBCreateErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error creating instance: mocked insert instance error", err.Error()) + s.Require().Equal("creating instance: mocked insert instance error", err.Error()) +} + +func (s *InstancesTestSuite) TestGetPoolInstanceByName() { + storeInstance := s.Fixtures.Instances[0] // this is already created in `SetupTest()` + + instance, err := s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) + + s.Require().Nil(err) + s.Require().Equal(storeInstance.Name, instance.Name) + s.Require().Equal(storeInstance.PoolID, instance.PoolID) + s.Require().Equal(storeInstance.OSArch, instance.OSArch) + s.Require().Equal(storeInstance.OSType, instance.OSType) + s.Require().Equal(storeInstance.CallbackURL, instance.CallbackURL) +} + +func (s *InstancesTestSuite) TestGetPoolInstanceByNameNotFound() { + _, err := s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, "not-existent-instance-name") + + s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) } func (s *InstancesTestSuite) TestGetInstanceByName() { storeInstance := s.Fixtures.Instances[1] - instance, err := s.Store.GetInstance(s.adminCtx, storeInstance.Name) + instance, err := s.Store.GetInstanceByName(s.adminCtx, storeInstance.Name) s.Require().Nil(err) s.Require().Equal(storeInstance.Name, instance.Name) @@ -250,9 +263,9 @@ func (s *InstancesTestSuite) TestGetInstanceByName() { } func (s *InstancesTestSuite) TestGetInstanceByNameFetchInstanceFailed() { - _, err := s.Store.GetInstance(s.adminCtx, "not-existent-instance-name") + _, err := s.Store.GetInstanceByName(s.adminCtx, "not-existent-instance-name") - s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) + s.Require().Equal("fetching instance: fetching instance by name: not found", err.Error()) } func (s *InstancesTestSuite) TestDeleteInstance() { @@ -262,31 +275,14 @@ func (s *InstancesTestSuite) TestDeleteInstance() { s.Require().Nil(err) - _, err = s.Store.GetInstance(s.adminCtx, storeInstance.Name) - s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) - - err = s.Store.DeleteInstance(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - s.Require().Nil(err) -} - -func (s *InstancesTestSuite) TestDeleteInstanceByName() { - storeInstance := s.Fixtures.Instances[0] - - err := s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) - - s.Require().Nil(err) - - _, err = s.Store.GetInstance(s.adminCtx, storeInstance.Name) - s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) - - err = 
s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) - s.Require().Nil(err) + _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) + s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) } func (s *InstancesTestSuite) TestDeleteInstanceInvalidPoolID() { err := s.Store.DeleteInstance(s.adminCtx, "dummy-pool-id", "dummy-instance-name") - s.Require().Equal("error deleting instance: error fetching pool: error parsing id: invalid request", err.Error()) + s.Require().Equal("deleting instance: fetching pool: parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestDeleteInstanceDBRecordNotFoundErr() { @@ -361,7 +357,7 @@ func (s *InstancesTestSuite) TestDeleteInstanceDBDeleteErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error deleting instance: mocked delete instance error", err.Error()) + s.Require().Equal("deleting instance: mocked delete instance error", err.Error()) } func (s *InstancesTestSuite) TestAddInstanceEvent() { @@ -371,7 +367,7 @@ func (s *InstancesTestSuite) TestAddInstanceEvent() { err := s.Store.AddInstanceEvent(s.adminCtx, storeInstance.Name, params.StatusEvent, params.EventInfo, statusMsg) s.Require().Nil(err) - instance, err := s.Store.GetInstance(s.adminCtx, storeInstance.Name) + instance, err := s.Store.GetInstanceByName(s.adminCtx, storeInstance.Name) if err != nil { s.FailNow(fmt.Sprintf("failed to get db instance: %s", err)) } @@ -412,7 +408,7 @@ func (s *InstancesTestSuite) TestAddInstanceEventDBUpdateErr() { err := s.StoreSQLMocked.AddInstanceEvent(s.adminCtx, instance.Name, params.StatusEvent, params.EventInfo, statusMsg) s.Require().NotNil(err) - s.Require().Equal("error adding status message: mocked add status message error", err.Error()) + s.Require().Equal("adding status message: mocked add status message error", err.Error()) s.assertSQLMockExpectations() } @@ -457,7 +453,7 @@ func (s *InstancesTestSuite) TestUpdateInstanceDBUpdateInstanceErr() { _, err := s.StoreSQLMocked.UpdateInstance(s.adminCtx, instance.Name, s.Fixtures.UpdateInstanceParams) s.Require().NotNil(err) - s.Require().Equal("error updating instance: mocked update instance error", err.Error()) + s.Require().Equal("updating instance: mocked update instance error", err.Error()) s.assertSQLMockExpectations() } @@ -503,7 +499,7 @@ func (s *InstancesTestSuite) TestUpdateInstanceDBUpdateAddressErr() { _, err := s.StoreSQLMocked.UpdateInstance(s.adminCtx, instance.Name, s.Fixtures.UpdateInstanceParams) s.Require().NotNil(err) - s.Require().Equal("error updating addresses: update addresses mock error", err.Error()) + s.Require().Equal("updating addresses: update addresses mock error", err.Error()) s.assertSQLMockExpectations() } @@ -517,7 +513,7 @@ func (s *InstancesTestSuite) TestListPoolInstances() { func (s *InstancesTestSuite) TestListPoolInstancesInvalidPoolID() { _, err := s.Store.ListPoolInstances(s.adminCtx, "dummy-pool-id") - s.Require().Equal("error parsing id: invalid request", err.Error()) + s.Require().Equal("parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestListAllInstances() { @@ -536,7 +532,7 @@ func (s *InstancesTestSuite) TestListAllInstancesDBFetchErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error fetching instances: fetch instances mock error", err.Error()) + s.Require().Equal("fetching instances: fetch instances mock error", err.Error()) } func (s *InstancesTestSuite) 
TestPoolInstanceCount() { @@ -549,7 +545,7 @@ func (s *InstancesTestSuite) TestPoolInstanceCount() { func (s *InstancesTestSuite) TestPoolInstanceCountInvalidPoolID() { _, err := s.Store.PoolInstanceCount(s.adminCtx, "dummy-pool-id") - s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestPoolInstanceCountDBCountErr() { @@ -568,9 +564,10 @@ func (s *InstancesTestSuite) TestPoolInstanceCountDBCountErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error fetching instance count: count mock error", err.Error()) + s.Require().Equal("fetching instance count: count mock error", err.Error()) } func TestInstTestSuite(t *testing.T) { + t.Parallel() suite.Run(t, new(InstancesTestSuite)) } diff --git a/database/sql/jobs.go b/database/sql/jobs.go index ffa3a7b5..9cbf2ffe 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -1,27 +1,12 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package sql import ( "context" "encoding/json" - "errors" - "fmt" "log/slog" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/gorm" "gorm.io/gorm/clause" @@ -36,14 +21,12 @@ func sqlWorkflowJobToParamsJob(job WorkflowJob) (params.Job, error) { labels := []string{} if job.Labels != nil { if err := json.Unmarshal(job.Labels, &labels); err != nil { - return params.Job{}, fmt.Errorf("error unmarshaling labels: %w", err) + return params.Job{}, errors.Wrap(err, "unmarshaling labels") } } jobParam := params.Job{ ID: job.ID, - WorkflowJobID: job.WorkflowJobID, - ScaleSetJobID: job.ScaleSetJobID, RunID: job.RunID, Action: job.Action, Status: job.Status, @@ -74,12 +57,11 @@ func sqlWorkflowJobToParamsJob(job WorkflowJob) (params.Job, error) { func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job) (WorkflowJob, error) { asJSON, err := json.Marshal(job.Labels) if err != nil { - return WorkflowJob{}, fmt.Errorf("error marshaling labels: %w", err) + return WorkflowJob{}, errors.Wrap(err, "marshaling labels") } workflofJob := WorkflowJob{ - ScaleSetJobID: job.ScaleSetJobID, - WorkflowJobID: job.WorkflowJobID, + ID: job.ID, RunID: job.RunID, Action: job.Action, Status: job.Status, @@ -100,7 +82,7 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } if job.RunnerName != "" { - instance, err := s.getInstance(s.ctx, job.RunnerName) + instance, err := s.getInstanceByName(s.ctx, job.RunnerName) if err != nil { // This usually is very normal as not all jobs run on our runners. 
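Aside on the s.writeMux.Lock()/defer s.writeMux.Unlock() pairs added to DeleteJob and the other mutating methods below (and earlier to the instance store): a minimal sketch of the pattern, assuming writeMux is a plain sync.Mutex on sqlDatabase, which serializes writers in front of the shared GORM connection. This is a common defensive choice when the backing store is SQLite, which permits only one writer at a time; the type and method names below are stand-ins, not code from this patch.

package main

import (
	"sync"

	"gorm.io/gorm"
)

// workflowJob is a trimmed stand-in for the real WorkflowJob model.
type workflowJob struct {
	ID int64
}

// store mirrors the locking pattern sqlDatabase uses in this patch.
type store struct {
	writeMux sync.Mutex // serializes every mutating statement
	conn     *gorm.DB
}

func (s *store) deleteJob(id int64) error {
	s.writeMux.Lock()
	defer s.writeMux.Unlock()
	// Only one goroutine at a time reaches the DELETE below.
	return s.conn.Delete(&workflowJob{}, id).Error
}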
slog.DebugContext(ctx, "failed to get instance by name", "instance_name", job.RunnerName) @@ -113,49 +95,42 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { - var workflowJob WorkflowJob - q := s.conn.Where("workflow_job_id = ?", jobID).Preload("Instance").First(&workflowJob) - if q.Error != nil { - if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return nil - } - return fmt.Errorf("error fetching job: %w", q.Error) - } - removedJob, err := sqlWorkflowJobToParamsJob(workflowJob) - if err != nil { - return fmt.Errorf("error converting job: %w", err) - } + s.writeMux.Lock() + defer s.writeMux.Unlock() defer func() { if err == nil { - if notifyErr := s.sendNotify(common.JobEntityType, common.DeleteOperation, removedJob); notifyErr != nil { + if notifyErr := s.sendNotify(common.JobEntityType, common.DeleteOperation, params.Job{ID: jobID}); notifyErr != nil { slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") } } }() - q = s.conn.Delete(&workflowJob) + q := s.conn.Delete(&WorkflowJob{}, jobID) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return fmt.Errorf("error deleting job: %w", q.Error) + return errors.Wrap(q.Error, "deleting job") } return nil } func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + entityUUID, err := uuid.Parse(entityID) if err != nil { - return fmt.Errorf("error parsing entity id: %w", err) + return errors.Wrap(err, "parsing entity id") } var workflowJob WorkflowJob - q := s.conn.Preload("Instance").Where("workflow_job_id = ?", jobID).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ?", jobID).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return runnerErrors.ErrNotFound } - return fmt.Errorf("error fetching job: %w", q.Error) + return errors.Wrap(q.Error, "fetching job") } if workflowJob.LockedBy.String() == entityID { @@ -170,12 +145,12 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e workflowJob.LockedBy = entityUUID if err := s.conn.Save(&workflowJob).Error; err != nil { - return fmt.Errorf("error saving job: %w", err) + return errors.Wrap(err, "saving job") } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return fmt.Errorf("error converting job: %w", err) + return errors.Wrap(err, "converting job") } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) @@ -183,14 +158,17 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e } func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("workflow_job_id = ? and status = ?", jobID, params.JobStatusQueued).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ? 
and status = ?", jobID, params.JobStatusQueued).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return fmt.Errorf("error fetching job: %w", q.Error) + return errors.Wrap(q.Error, "fetching job") } if workflowJob.LockedBy == uuid.Nil { @@ -200,25 +178,28 @@ func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err workflowJob.LockedBy = uuid.Nil if err := s.conn.Save(&workflowJob).Error; err != nil { - return fmt.Errorf("error saving job: %w", err) + return errors.Wrap(err, "saving job") } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return fmt.Errorf("error converting job: %w", err) + return errors.Wrap(err, "converting job") } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) return nil } func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Where("workflow_job_id = ?", jobID).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Where("id = ?", jobID).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return runnerErrors.ErrNotFound } - return fmt.Errorf("error fetching job: %w", q.Error) + return errors.Wrap(q.Error, "fetching job") } if workflowJob.LockedBy == uuid.Nil { @@ -232,32 +213,28 @@ func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) workflowJob.LockedBy = uuid.Nil if err := s.conn.Save(&workflowJob).Error; err != nil { - return fmt.Errorf("error saving job: %w", err) + return errors.Wrap(err, "saving job") } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return fmt.Errorf("error converting job: %w", err) + return errors.Wrap(err, "converting job") } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) return nil } func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob var err error - - searchField := "workflow_job_id = ?" - var searchVal any = job.WorkflowJobID - if job.ScaleSetJobID != "" { - searchField = "scale_set_job_id = ?" 
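Aside on the lookup shape LockJob, BreakLockJobIsQueued, UnlockJob and CreateOrUpdateJob converge on in this file: select by the primary key id while holding a row lock via GORM's clause.Locking, instead of searching a separate workflow_job_id (or scale_set_job_id) column. A minimal sketch of that query under illustrative, trimmed-down types; on engines that support SELECT ... FOR UPDATE (PostgreSQL, MySQL) the row stays locked for the rest of the transaction:

package main

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

// WorkflowJob is trimmed to the one column the lookup needs.
type WorkflowJob struct {
	ID int64 `gorm:"index"`
}

// lockJobRow emits SELECT ... WHERE id = ? FOR UPDATE and scans the row.
func lockJobRow(tx *gorm.DB, jobID int64) (WorkflowJob, error) {
	var job WorkflowJob
	err := tx.Clauses(clause.Locking{Strength: "UPDATE"}).
		Where("id = ?", jobID).
		First(&job).Error
	return job, err
}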
- searchVal = job.ScaleSetJobID - } - q := s.conn.Preload("Instance").Where(searchField, searchVal).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ?", job.ID).First(&workflowJob) if q.Error != nil { if !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return params.Job{}, fmt.Errorf("error fetching job: %w", q.Error) + return params.Job{}, errors.Wrap(q.Error, "fetching job") } } var operation common.OperationType @@ -273,16 +250,13 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa workflowJob.GithubRunnerID = job.GithubRunnerID workflowJob.RunnerGroupID = job.RunnerGroupID workflowJob.RunnerGroupName = job.RunnerGroupName - if job.RunID != 0 && workflowJob.RunID == 0 { - workflowJob.RunID = job.RunID - } if job.LockedBy != uuid.Nil { workflowJob.LockedBy = job.LockedBy } if job.RunnerName != "" { - instance, err := s.getInstance(ctx, job.RunnerName) + instance, err := s.getInstanceByName(ctx, job.RunnerName) if err == nil { workflowJob.InstanceID = &instance.ID } else { @@ -303,23 +277,23 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa workflowJob.EnterpriseID = job.EnterpriseID } if err := s.conn.Save(&workflowJob).Error; err != nil { - return params.Job{}, fmt.Errorf("error saving job: %w", err) + return params.Job{}, errors.Wrap(err, "saving job") } } else { operation = common.CreateOperation workflowJob, err = s.paramsJobToWorkflowJob(ctx, job) if err != nil { - return params.Job{}, fmt.Errorf("error converting job: %w", err) + return params.Job{}, errors.Wrap(err, "converting job") } if err := s.conn.Create(&workflowJob).Error; err != nil { - return params.Job{}, fmt.Errorf("error creating job: %w", err) + return params.Job{}, errors.Wrap(err, "creating job") } } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return params.Job{}, fmt.Errorf("error converting job: %w", err) + return params.Job{}, errors.Wrap(err, "converting job") } s.sendNotify(common.JobEntityType, operation, asParams) @@ -339,7 +313,7 @@ func (s *sqlDatabase) ListJobsByStatus(_ context.Context, status params.JobStatu for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, fmt.Errorf("error converting job: %w", err) + return nil, errors.Wrap(err, "converting job") } ret[idx] = jobParam } @@ -347,25 +321,21 @@ func (s *sqlDatabase) ListJobsByStatus(_ context.Context, status params.JobStatu } // ListEntityJobsByStatus lists all jobs for a given entity type and id. -func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { +func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { u, err := uuid.Parse(entityID) if err != nil { return nil, err } var jobs []WorkflowJob - query := s.conn. - Model(&WorkflowJob{}). - Preload("Instance"). - Where("status = ?", status). 
- Where("workflow_job_id > 0") + query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("status = ?", status) switch entityType { - case params.ForgeEntityTypeOrganization: + case params.GithubEntityTypeOrganization: query = query.Where("org_id = ?", u) - case params.ForgeEntityTypeRepository: + case params.GithubEntityTypeRepository: query = query.Where("repo_id = ?", u) - case params.ForgeEntityTypeEnterprise: + case params.GithubEntityTypeEnterprise: query = query.Where("enterprise_id = ?", u) } @@ -380,7 +350,7 @@ func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType param for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, fmt.Errorf("error converting job: %w", err) + return nil, errors.Wrap(err, "converting job") } ret[idx] = jobParam } @@ -402,7 +372,7 @@ func (s *sqlDatabase) ListAllJobs(_ context.Context) ([]params.Job, error) { for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, fmt.Errorf("error converting job: %w", err) + return nil, errors.Wrap(err, "converting job") } ret[idx] = jobParam } @@ -412,7 +382,7 @@ func (s *sqlDatabase) ListAllJobs(_ context.Context) ([]params.Job, error) { // GetJobByID gets a job by id. func (s *sqlDatabase) GetJobByID(_ context.Context, jobID int64) (params.Job, error) { var job WorkflowJob - query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("workflow_job_id = ?", jobID) + query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("id = ?", jobID) if err := query.First(&job); err.Error != nil { if errors.Is(err.Error, gorm.ErrRecordNotFound) { @@ -426,6 +396,9 @@ func (s *sqlDatabase) GetJobByID(_ context.Context, jobID int64) (params.Job, er // DeleteCompletedJobs deletes all completed jobs. func (s *sqlDatabase) DeleteCompletedJobs(_ context.Context) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + query := s.conn.Model(&WorkflowJob{}).Where("status = ?", params.JobStatusCompleted) if err := query.Unscoped().Delete(&WorkflowJob{}); err.Error != nil { diff --git a/database/sql/models.go b/database/sql/models.go index d3cb044a..ac7a056a 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -1,4 +1,4 @@ -// Copyright 2025 Cloudbase Solutions SRL +// Copyright 2022 Cloudbase Solutions SRL // // Licensed under the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. You may obtain @@ -15,10 +15,10 @@ package sql import ( - "fmt" "time" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" @@ -40,28 +40,12 @@ func (b *Base) BeforeCreate(_ *gorm.DB) error { } newID, err := uuid.NewRandom() if err != nil { - return fmt.Errorf("error generating id: %w", err) + return errors.Wrap(err, "generating id") } b.ID = newID return nil } -type ControllerInfo struct { - Base - - ControllerID uuid.UUID - - CallbackURL string - MetadataURL string - WebhookBaseURL string - // MinimumJobAgeBackoff is the minimum time that a job must be in the queue - // before GARM will attempt to allocate a runner to service it. This backoff - // is useful if you have idle runners in various pools that could potentially - // pick up the job. GARM would allow this amount of time for runners to react - // before spinning up a new one and potentially having to scale down later. 
- MinimumJobAgeBackoff uint -} - type Tag struct { Base @@ -102,152 +86,59 @@ type Pool struct { Priority uint `gorm:"index:idx_pool_priority"` } -// ScaleSet represents a github scale set. Scale sets are almost identical to pools with a few -// notable exceptions: -// - Labels are no longer relevant -// - Workflows will use the scaleset name to target runners. -// - A scale set is a stand alone unit. If a workflow targets a scale set, no other runner will pick up that job. -type ScaleSet struct { - gorm.Model - - // ScaleSetID is the github ID of the scale set. This field may not be set if - // the scale set was ceated in GARM but has not yet been created in GitHub. - // The scale set ID is also not globally unique. It is only unique within the context - // of an entity. - ScaleSetID int `gorm:"index:idx_scale_set"` - Name string `gorm:"unique_index:idx_name"` - GitHubRunnerGroup string `gorm:"unique_index:idx_name"` - DisableUpdate bool - - // State stores the provisioning state of the scale set in GitHub - State params.ScaleSetState - // ExtendedState stores a more detailed message regarding the State. - // If an error occurs, the reason for the error will be stored here. - ExtendedState string - - ProviderName string - RunnerPrefix string - MaxRunners uint - MinIdleRunners uint - RunnerBootstrapTimeout uint - Image string - Flavor string - OSType commonParams.OSType - OSArch commonParams.OSArch - Enabled bool - LastMessageID int64 - DesiredRunnerCount int - // ExtraSpecs is an opaque json that gets sent to the provider - // as part of the bootstrap params for instances. It can contain - // any kind of data needed by providers. - ExtraSpecs datatypes.JSON - - RepoID *uuid.UUID `gorm:"index"` - Repository Repository `gorm:"foreignKey:RepoID;"` - - OrgID *uuid.UUID `gorm:"index"` - Organization Organization `gorm:"foreignKey:OrgID"` - - EnterpriseID *uuid.UUID `gorm:"index"` - Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` - - Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` -} - -type RepositoryEvent struct { - gorm.Model - - EventType params.EventType - EventLevel params.EventLevel - Message string `gorm:"type:text"` - - RepoID uuid.UUID `gorm:"index:idx_repo_event"` - Repo Repository `gorm:"foreignKey:RepoID"` -} - type Repository struct { Base + CredentialsName string + CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` - GiteaCredentialsID *uint `gorm:"index"` - GiteaCredentials GiteaCredentials `gorm:"foreignKey:GiteaCredentialsID;constraint:OnDelete:SET NULL"` - Owner string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Name string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:RepoID"` - ScaleSets []ScaleSet `gorm:"foreignKey:RepoID"` Jobs []WorkflowJob `gorm:"foreignKey:RepoID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` EndpointName *string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - - Events []RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } -type OrganizationEvent struct { - gorm.Model - - EventType params.EventType - EventLevel params.EventLevel - Message string `gorm:"type:text"` - - OrgID uuid.UUID `gorm:"index:idx_org_event"` - Org Organization `gorm:"foreignKey:OrgID"` -} type Organization struct { Base + CredentialsName string 
+ CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` - GiteaCredentialsID *uint `gorm:"index"` - GiteaCredentials GiteaCredentials `gorm:"foreignKey:GiteaCredentialsID;constraint:OnDelete:SET NULL"` - Name string `gorm:"index:idx_org_name_nocase,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:OrgID"` - ScaleSet []ScaleSet `gorm:"foreignKey:OrgID"` Jobs []WorkflowJob `gorm:"foreignKey:OrgID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` EndpointName *string `gorm:"index:idx_org_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - - Events []OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` -} - -type EnterpriseEvent struct { - gorm.Model - - EventType params.EventType - EventLevel params.EventLevel - Message string `gorm:"type:text"` - - EnterpriseID uuid.UUID `gorm:"index:idx_enterprise_event"` - Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` } type Enterprise struct { Base + CredentialsName string + CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` Name string `gorm:"index:idx_ent_name_nocase,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:EnterpriseID"` - ScaleSet []ScaleSet `gorm:"foreignKey:EnterpriseID"` Jobs []WorkflowJob `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` EndpointName *string `gorm:"index:idx_ent_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - - Events []EnterpriseEvent `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type Address struct { @@ -293,12 +184,9 @@ type Instance struct { GitHubRunnerGroup string AditionalLabels datatypes.JSON - PoolID *uuid.UUID + PoolID uuid.UUID Pool Pool `gorm:"foreignKey:PoolID"` - ScaleSetFkID *uint - ScaleSet ScaleSet `gorm:"foreignKey:ScaleSetFkID"` - StatusMessages []InstanceStatusUpdate `gorm:"foreignKey:InstanceID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` Job *WorkflowJob `gorm:"foreignKey:InstanceID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` @@ -316,15 +204,25 @@ type User struct { Enabled bool } +type ControllerInfo struct { + Base + + ControllerID uuid.UUID + + CallbackURL string + MetadataURL string + WebhookBaseURL string + // MinimumJobAgeBackoff is the minimum time that a job must be in the queue + // before GARM will attempt to allocate a runner to service it. This backoff + // is useful if you have idle runners in various pools that could potentially + // pick up the job. GARM would allow this amount of time for runners to react + // before spinning up a new one and potentially having to scale down later. + MinimumJobAgeBackoff uint +} + type WorkflowJob struct { // ID is the ID of the job. ID int64 `gorm:"index"` - - // WorkflowJobID is the ID of the workflow job. - WorkflowJobID int64 `gorm:"index:workflow_job_id_idx"` - // ScaleSetJobID is the job ID for a scaleset job. - ScaleSetJobID string `gorm:"index:scaleset_job_id_idx"` - // RunID is the ID of the workflow run. A run may have multiple jobs. RunID int64 // Action is the specific activity that triggered the event. 
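Aside on the association tags used by the models in this file: the credentials foreign keys restored above (CredentialsID *uint with constraint:OnDelete:SET NULL) are nullable on purpose, so deleting a credentials row detaches its repositories, organizations and enterprises rather than deleting them. A minimal sketch of the convention with illustrative, trimmed-down types:

package main

import "gorm.io/gorm"

type GithubCredentials struct {
	gorm.Model
	Name string
}

type Repository struct {
	gorm.Model
	// Nullable FK: on databases that enforce foreign keys, deleting the
	// credentials row sets this column to NULL instead of cascading.
	CredentialsID *uint             `gorm:"index"`
	Credentials   GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"`
}

func migrate(db *gorm.DB) error {
	// AutoMigrate emits the FK constraint, including ON DELETE SET NULL.
	return db.AutoMigrate(&GithubCredentials{}, &Repository{})
}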
@@ -387,8 +285,6 @@ type GithubEndpoint struct { UpdatedAt time.Time DeletedAt gorm.DeletedAt `gorm:"index"` - EndpointType params.EndpointType `gorm:"index:idx_endpoint_type"` - Description string `gorm:"type:text"` APIBaseURL string `gorm:"type:text collate nocase"` UploadBaseURL string `gorm:"type:text collate nocase"` @@ -403,9 +299,9 @@ type GithubCredentials struct { UserID *uuid.UUID `gorm:"index:idx_github_credentials,unique"` User User `gorm:"foreignKey:UserID"` - Description string `gorm:"type:text"` - AuthType params.ForgeAuthType `gorm:"index"` - Payload []byte `gorm:"type:longblob"` + Description string `gorm:"type:text"` + AuthType params.GithubAuthType `gorm:"index"` + Payload []byte `gorm:"type:longblob"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName"` EndpointName *string `gorm:"index"` @@ -414,21 +310,3 @@ type GithubCredentials struct { Organizations []Organization `gorm:"foreignKey:CredentialsID"` Enterprises []Enterprise `gorm:"foreignKey:CredentialsID"` } - -type GiteaCredentials struct { - gorm.Model - - Name string `gorm:"index:idx_gitea_credentials,unique;type:varchar(64) collate nocase"` - UserID *uuid.UUID `gorm:"index:idx_gitea_credentials,unique"` - User User `gorm:"foreignKey:UserID"` - - Description string `gorm:"type:text"` - AuthType params.ForgeAuthType `gorm:"index"` - Payload []byte `gorm:"type:longblob"` - - Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName"` - EndpointName *string `gorm:"index"` - - Repositories []Repository `gorm:"foreignKey:GiteaCredentialsID"` - Organizations []Organization `gorm:"foreignKey:GiteaCredentialsID"` -} diff --git a/database/sql/organizations.go b/database/sql/organizations.go index 22be6272..3c2cdbbf 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -16,11 +16,11 @@ package sql import ( "context" - "errors" "fmt" "log/slog" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -29,88 +29,88 @@ import ( "github.com/cloudbase/garm/params" ) -func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Organization, err error) { +func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if webhookSecret == "" { return params.Organization{}, errors.New("creating org: missing secret") } secret, err := util.Seal([]byte(webhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return params.Organization{}, fmt.Errorf("error encoding secret: %w", err) + return params.Organization{}, errors.Wrap(err, "encoding secret") } defer func() { if err == nil { - s.sendNotify(common.OrganizationEntityType, common.CreateOperation, param) + s.sendNotify(common.OrganizationEntityType, common.CreateOperation, org) } }() newOrg := Organization{ Name: name, WebhookSecret: secret, + CredentialsName: credentialsName, PoolBalancerType: poolBalancerType, } err = s.conn.Transaction(func(tx *gorm.DB) error { - switch credentials.ForgeType { - case params.GithubEndpointType: - newOrg.CredentialsID = &credentials.ID - case params.GiteaEndpointType: - newOrg.GiteaCredentialsID = &credentials.ID - default: - return fmt.Errorf("unsupported credentials type: %w", runnerErrors.ErrBadRequest) + creds, err := 
s.getGithubCredentialsByName(ctx, tx, credentialsName, false) + if err != nil { + return errors.Wrap(err, "creating org") } + if creds.EndpointName == nil { + return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") + } + newOrg.CredentialsID = &creds.ID + newOrg.CredentialsName = creds.Name + newOrg.EndpointName = creds.EndpointName - newOrg.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newOrg) if q.Error != nil { - return fmt.Errorf("error creating org: %w", q.Error) + return errors.Wrap(q.Error, "creating org") } + + newOrg.Credentials = creds + newOrg.Endpoint = creds.Endpoint + return nil }) if err != nil { - return params.Organization{}, fmt.Errorf("error creating org: %w", err) + return params.Organization{}, errors.Wrap(err, "creating org") } - ret, err := s.GetOrganizationByID(ctx, newOrg.ID.String()) + org, err = s.sqlToCommonOrganization(newOrg, true) if err != nil { - return params.Organization{}, fmt.Errorf("error creating org: %w", err) + return params.Organization{}, errors.Wrap(err, "creating org") } + org.WebhookSecret = webhookSecret - return ret, nil + return org, nil } func (s *sqlDatabase) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) { org, err := s.getOrg(ctx, name, endpointName) if err != nil { - return params.Organization{}, fmt.Errorf("error fetching org: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching org") } param, err := s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, fmt.Errorf("error fetching org: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching org") } return param, nil } -func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { +func (s *sqlDatabase) ListOrganizations(_ context.Context) ([]params.Organization, error) { var orgs []Organization q := s.conn. Preload("Credentials"). - Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). - Preload("GiteaCredentials.Endpoint"). - Preload("Endpoint") - - if filter.Name != "" { - q = q.Where("name = ?", filter.Name) - } - - if filter.Endpoint != "" { - q = q.Where("endpoint_name = ?", filter.Endpoint) - } - q = q.Find(&orgs) + Preload("Endpoint"). 
+ Find(&orgs) if q.Error != nil { - return []params.Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) + return []params.Organization{}, errors.Wrap(q.Error, "fetching org from database") } ret := make([]params.Organization, len(orgs)) @@ -118,7 +118,7 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.Organiz var err error ret[idx], err = s.sqlToCommonOrganization(val, true) if err != nil { - return nil, fmt.Errorf("error fetching org: %w", err) + return nil, errors.Wrap(err, "fetching org") } } @@ -126,9 +126,12 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.Organiz } func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err error) { - org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") + s.writeMux.Lock() + defer s.writeMux.Unlock() + + org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return fmt.Errorf("error fetching org: %w", err) + return errors.Wrap(err, "fetching org") } defer func(org Organization) { @@ -144,13 +147,16 @@ func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err q := s.conn.Unscoped().Delete(&org) if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return fmt.Errorf("error deleting org: %w", q.Error) + return errors.Wrap(q.Error, "deleting org") } return nil } func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) (paramOrg params.Organization, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.OrganizationEntityType, common.UpdateOperation, paramOrg) @@ -162,23 +168,24 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para var err error org, err = s.getOrgByID(ctx, tx, orgID) if err != nil { - return fmt.Errorf("error fetching org: %w", err) + return errors.Wrap(err, "fetching org") } if org.EndpointName == nil { - return fmt.Errorf("error org has no endpoint: %w", runnerErrors.ErrUnprocessable) + return errors.Wrap(runnerErrors.ErrUnprocessable, "org has no endpoint") } if param.CredentialsName != "" { + org.CredentialsName = param.CredentialsName creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { - return fmt.Errorf("error fetching credentials: %w", err) + return errors.Wrap(err, "fetching credentials") } if creds.EndpointName == nil { - return fmt.Errorf("error credentials have no endpoint: %w", runnerErrors.ErrUnprocessable) + return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") } if *creds.EndpointName != *org.EndpointName { - return fmt.Errorf("error endpoint mismatch: %w", runnerErrors.ErrBadRequest) + return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch") } org.CredentialsID = &creds.ID } @@ -197,44 +204,35 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para q := tx.Save(&org) if q.Error != nil { - return fmt.Errorf("error saving org: %w", q.Error) + return errors.Wrap(q.Error, "saving org") } return nil }) if err != nil { - return params.Organization{}, fmt.Errorf("error saving org: %w", err) + return params.Organization{}, errors.Wrap(err, "saving org") } - org, err = s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") 
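Aside on the preload lists being trimmed in the getOrgByID calls around this hunk: GORM treats each string in the variadic list as one association to eager-load, and resolves dotted paths such as "Credentials.Endpoint" as nested preloads. A minimal sketch with illustrative, trimmed-down types:

package main

import "gorm.io/gorm"

type GithubEndpoint struct {
	gorm.Model
	Name string
}

type GithubCredentials struct {
	gorm.Model
	EndpointID *uint
	Endpoint   GithubEndpoint
}

type Organization struct {
	gorm.Model
	CredentialsID *uint
	Credentials   GithubCredentials
}

// loadOrg hydrates Credentials and, via the dotted path, the credentials'
// Endpoint alongside the organization itself.
func loadOrg(db *gorm.DB, id uint) (Organization, error) {
	var org Organization
	err := db.
		Preload("Credentials").
		Preload("Credentials.Endpoint").
		First(&org, id).Error
	return org, err
}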
+ org, err = s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return params.Organization{}, fmt.Errorf("error updating enterprise: %w", err) + return params.Organization{}, errors.Wrap(err, "updating enterprise") } paramOrg, err = s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, fmt.Errorf("error saving org: %w", err) + return params.Organization{}, errors.Wrap(err, "saving org") } return paramOrg, nil } func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { - preloadList := []string{ - "Pools", - "Credentials", - "Endpoint", - "Credentials.Endpoint", - "GiteaCredentials", - "GiteaCredentials.Endpoint", - "Events", - } - org, err := s.getOrgByID(ctx, s.conn, orgID, preloadList...) + org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint") if err != nil { - return params.Organization{}, fmt.Errorf("error fetching org: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching org") } param, err := s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, fmt.Errorf("error fetching org: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching org") } return param, nil } @@ -242,7 +240,7 @@ func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (pa func (s *sqlDatabase) getOrgByID(_ context.Context, db *gorm.DB, id string, preload ...string) (Organization, error) { u, err := uuid.Parse(id) if err != nil { - return Organization{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) + return Organization{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var org Organization @@ -258,7 +256,7 @@ func (s *sqlDatabase) getOrgByID(_ context.Context, db *gorm.DB, id string, prel if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Organization{}, runnerErrors.ErrNotFound } - return Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) + return Organization{}, errors.Wrap(q.Error, "fetching org from database") } return org, nil } @@ -268,16 +266,14 @@ func (s *sqlDatabase) getOrg(_ context.Context, name, endpointName string) (Orga q := s.conn.Where("name = ? COLLATE NOCASE and endpoint_name = ? COLLATE NOCASE", name, endpointName). Preload("Credentials"). - Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). - Preload("GiteaCredentials.Endpoint"). Preload("Endpoint"). 
First(&org) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Organization{}, runnerErrors.ErrNotFound } - return Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) + return Organization{}, errors.Wrap(q.Error, "fetching org from database") } return org, nil } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index 245b3c1f..b80ae763 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -53,11 +53,9 @@ type OrgTestSuite struct { adminCtx context.Context adminUserID string - testCreds params.ForgeCredentials - testCredsGitea params.ForgeCredentials - secondaryTestCreds params.ForgeCredentials - githubEndpoint params.ForgeEndpoint - giteaEndpoint params.ForgeEndpoint + testCreds params.GithubCredentials + secondaryTestCreds params.GithubCredentials + githubEndpoint params.GithubEndpoint } func (s *OrgTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -93,9 +91,7 @@ func (s *OrgTestSuite) SetupTest() { s.Require().NotEmpty(s.adminUserID) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) - s.testCredsGitea = garmTesting.CreateTestGiteaCredentials(adminCtx, "new-creds", db, s.T(), s.giteaEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some organization objects in the database, for testing purposes @@ -104,7 +100,7 @@ func (s *OrgTestSuite) SetupTest() { org, err := db.CreateOrganization( s.adminCtx, fmt.Sprintf("test-org-%d", i), - s.testCreds, + s.testCreds.Name, fmt.Sprintf("test-webhook-secret-%d", i), params.PoolBalancerTypeRoundRobin, ) @@ -183,7 +179,7 @@ func (s *OrgTestSuite) TestCreateOrganization() { org, err := s.Store.CreateOrganization( s.adminCtx, s.Fixtures.CreateOrgParams.Name, - s.testCreds, + s.Fixtures.CreateOrgParams.CredentialsName, s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -196,62 +192,6 @@ func (s *OrgTestSuite) TestCreateOrganization() { s.Require().Equal(storeOrg.Name, org.Name) s.Require().Equal(storeOrg.Credentials.Name, org.Credentials.Name) s.Require().Equal(storeOrg.WebhookSecret, org.WebhookSecret) - - entity, err := org.GetEntity() - s.Require().Nil(err) - s.Require().Equal(entity.EntityType, params.ForgeEntityTypeOrganization) - s.Require().Equal(entity.ID, org.ID) - - forgeType, err := entity.GetForgeType() - s.Require().Nil(err) - s.Require().Equal(forgeType, params.GithubEndpointType) -} - -func (s *OrgTestSuite) TestCreateOrgForGitea() { - // call tested function - org, err := s.Store.CreateOrganization( - s.adminCtx, - s.Fixtures.CreateOrgParams.Name, - s.testCredsGitea, - s.Fixtures.CreateOrgParams.WebhookSecret, - params.PoolBalancerTypeRoundRobin) - - // assertions - s.Require().Nil(err) - storeOrg, err := s.Store.GetOrganizationByID(s.adminCtx, org.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to get organization by id: %v", err)) - } - s.Require().Equal(storeOrg.Name, org.Name) - s.Require().Equal(storeOrg.Credentials.Name, org.Credentials.Name) - s.Require().Equal(storeOrg.WebhookSecret, org.WebhookSecret) - - entity, err := org.GetEntity() - s.Require().Nil(err) - s.Require().Equal(entity.EntityType, params.ForgeEntityTypeOrganization) - 
s.Require().Equal(entity.ID, org.ID) - - forgeType, err := entity.GetForgeType() - s.Require().Nil(err) - s.Require().Equal(forgeType, params.GiteaEndpointType) -} - -func (s *OrgTestSuite) TestCreateOrganizationInvalidForgeType() { - credentials := params.ForgeCredentials{ - Name: "test-creds", - Endpoint: s.githubEndpoint, - ID: 99, - ForgeType: params.EndpointType("invalid-forge-type"), - } - - _, err := s.Store.CreateOrganization( - s.adminCtx, - s.Fixtures.CreateOrgParams.Name, - credentials, - s.Fixtures.CreateOrgParams.WebhookSecret, - params.PoolBalancerTypeRoundRobin) - s.Require().NotNil(err) - s.Require().Equal("error creating org: unsupported credentials type: invalid request", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { @@ -270,16 +210,25 @@ func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { _, err = sqlDB.CreateOrganization( s.adminCtx, s.Fixtures.CreateOrgParams.Name, - s.testCreds, + s.Fixtures.CreateOrgParams.CredentialsName, s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() { s.Fixtures.SQLMock.ExpectBegin() + s.Fixtures.SQLMock. + ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")). + WithArgs(s.adminUserID, s.Fixtures.Orgs[0].CredentialsName, 1). + WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}). + AddRow(s.testCreds.ID, s.githubEndpoint.Name)) + s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")). + WithArgs(s.testCreds.Endpoint.Name). + WillReturnRows(sqlmock.NewRows([]string{"name"}). + AddRow(s.githubEndpoint.Name)) s.Fixtures.SQLMock. ExpectExec(regexp.QuoteMeta("INSERT INTO `organizations`")). 
 		WillReturnError(fmt.Errorf("creating org mock error"))
@@ -288,12 +237,12 @@ func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() {
 	_, err := s.StoreSQLMocked.CreateOrganization(
 		s.adminCtx,
 		s.Fixtures.CreateOrgParams.Name,
-		s.testCreds,
+		s.Fixtures.CreateOrgParams.CredentialsName,
 		s.Fixtures.CreateOrgParams.WebhookSecret,
 		params.PoolBalancerTypeRoundRobin)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating org: error creating org: creating org mock error", err.Error())
+	s.Require().Equal("creating org: creating org: creating org mock error", err.Error())

 	s.assertSQLMockExpectations()
 }
@@ -316,7 +265,7 @@ func (s *OrgTestSuite) TestGetOrganizationNotFound() {
 	_, err := s.Store.GetOrganization(s.adminCtx, "dummy-name", "github.com")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: not found", err.Error())
+	s.Require().Equal("fetching org: not found", err.Error())
 }

 func (s *OrgTestSuite) TestGetOrganizationDBDecryptingErr() {
@@ -328,83 +277,27 @@ func (s *OrgTestSuite) TestGetOrganizationDBDecryptingErr() {
 	_, err := s.StoreSQLMocked.GetOrganization(s.adminCtx, s.Fixtures.Orgs[0].Name, s.Fixtures.Orgs[0].Endpoint.Name)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: missing secret", err.Error())
+	s.Require().Equal("fetching org: missing secret", err.Error())
 	s.assertSQLMockExpectations()
 }

 func (s *OrgTestSuite) TestListOrganizations() {
-	orgs, err := s.Store.ListOrganizations(s.adminCtx, params.OrganizationFilter{})
+	orgs, err := s.Store.ListOrganizations(s.adminCtx)

 	s.Require().Nil(err)
 	garmTesting.EqualDBEntityByName(s.T(), s.Fixtures.Orgs, orgs)
 }

-func (s *OrgTestSuite) TestListOrganizationsWithFilters() {
-	org, err := s.Store.CreateOrganization(
-		s.adminCtx,
-		"test-org",
-		s.testCreds,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	org2, err := s.Store.CreateOrganization(
-		s.adminCtx,
-		"test-org",
-		s.testCredsGitea,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	org3, err := s.Store.CreateOrganization(
-		s.adminCtx,
-		"test-org2",
-		s.testCreds,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-	orgs, err := s.Store.ListOrganizations(
-		s.adminCtx,
-		params.OrganizationFilter{
-			Name: "test-org",
-		})
-
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org, org2}, orgs)
-
-	orgs, err = s.Store.ListOrganizations(
-		s.adminCtx,
-		params.OrganizationFilter{
-			Name:     "test-org",
-			Endpoint: s.giteaEndpoint.Name,
-		})
-
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org2}, orgs)
-
-	orgs, err = s.Store.ListOrganizations(
-		s.adminCtx,
-		params.OrganizationFilter{
-			Name: "test-org2",
-		})
-
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org3}, orgs)
-}
-
 func (s *OrgTestSuite) TestListOrganizationsDBFetchErr() {
 	s.Fixtures.SQLMock.
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organizations` WHERE `organizations`.`deleted_at` IS NULL")).
 		WillReturnError(fmt.Errorf("fetching user from database mock error"))

-	_, err := s.StoreSQLMocked.ListOrganizations(s.adminCtx, params.OrganizationFilter{})
+	_, err := s.StoreSQLMocked.ListOrganizations(s.adminCtx)

 	s.assertSQLMockExpectations()
 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org from database: fetching user from database mock error", err.Error())
+	s.Require().Equal("fetching org from database: fetching user from database mock error", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganization() {
@@ -413,14 +306,14 @@ func (s *OrgTestSuite) TestDeleteOrganization() {
 	s.Require().Nil(err)
 	_, err = s.Store.GetOrganizationByID(s.adminCtx, s.Fixtures.Orgs[0].ID)
 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: not found", err.Error())
+	s.Require().Equal("fetching org: not found", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganizationInvalidOrgID() {
 	err := s.Store.DeleteOrganization(s.adminCtx, "dummy-org-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching org: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganizationDBDeleteErr() {
@@ -439,7 +332,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationDBDeleteErr() {

 	s.assertSQLMockExpectations()
 	s.Require().NotNil(err)
-	s.Require().Equal("error deleting org: mocked delete org error", err.Error())
+	s.Require().Equal("deleting org: mocked delete org error", err.Error())
 }

 func (s *OrgTestSuite) TestUpdateOrganization() {
@@ -454,7 +347,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationInvalidOrgID() {
 	_, err := s.Store.UpdateOrganization(s.adminCtx, "dummy-org-id", s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving org: error fetching org: error parsing id: invalid request", err.Error())
+	s.Require().Equal("saving org: fetching org: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestUpdateOrganizationDBEncryptErr() {
@@ -479,7 +372,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBEncryptErr() {
 	_, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
+	s.Require().Equal("saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -507,7 +400,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBSaveErr() {
 	_, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving org: error saving org: saving org mock error", err.Error())
+	s.Require().Equal("saving org: saving org: saving org mock error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -535,7 +428,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBDecryptingErr() {
 	_, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
+	s.Require().Equal("saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -550,7 +443,7 @@ func (s *OrgTestSuite) TestGetOrganizationByIDInvalidOrgID() {
 	_, err := s.Store.GetOrganizationByID(s.adminCtx, "dummy-org-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching org: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() {
@@ -558,10 +451,6 @@ func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() {
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organizations` WHERE id = ? AND `organizations`.`deleted_at` IS NULL ORDER BY `organizations`.`id` LIMIT ?")).
 		WithArgs(s.Fixtures.Orgs[0].ID, 1).
 		WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Orgs[0].ID))
-	s.Fixtures.SQLMock.
-		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organization_events` WHERE `organization_events`.`org_id` = ? AND `organization_events`.`deleted_at` IS NULL")).
-		WithArgs(s.Fixtures.Orgs[0].ID).
-		WillReturnRows(sqlmock.NewRows([]string{"org_id"}).AddRow(s.Fixtures.Orgs[0].ID))
 	s.Fixtures.SQLMock.
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`org_id` = ? AND `pools`.`deleted_at` IS NULL")).
 		WithArgs(s.Fixtures.Orgs[0].ID).
@@ -571,7 +460,7 @@ func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() {

 	s.assertSQLMockExpectations()
 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching org: missing secret", err.Error())
+	s.Require().Equal("fetching org: missing secret", err.Error())
 }

 func (s *OrgTestSuite) TestCreateOrganizationPool() {
@@ -603,14 +492,14 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolMissingTags() {
 }

 func (s *OrgTestSuite) TestCreateOrganizationPoolInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	_, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error parsing id: invalid request", err.Error())
+	s.Require().Equal("parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchTagErr() {
@@ -628,7 +517,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchTagErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error())
+	s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -656,7 +545,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBAddingPoolErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating pool: mocked adding pool error", err.Error())
+	s.Require().Equal("creating pool: mocked adding pool error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -687,7 +576,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBSaveTagErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error associating tags: mocked saving tag error", err.Error())
+	s.Require().Equal("associating tags: mocked saving tag error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -728,7 +617,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchPoolErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool by ID: not found", err.Error())
+	s.Require().Equal("fetching pool: not found", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -751,14 +640,14 @@ func (s *OrgTestSuite) TestListOrgPools() {
 }

 func (s *OrgTestSuite) TestListOrgPoolsInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	_, err := s.Store.ListEntityPools(s.adminCtx, entity)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pools: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pools: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestGetOrganizationPool() {
@@ -776,14 +665,14 @@ func (s *OrgTestSuite) TestGetOrganizationPool() {
 }

 func (s *OrgTestSuite) TestGetOrganizationPoolInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	_, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pool: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganizationPool() {
@@ -798,18 +687,18 @@ func (s *OrgTestSuite) TestDeleteOrganizationPool() {
 	s.Require().Nil(err)

 	_, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID)
-	s.Require().Equal("fetching pool: error finding pool: not found", err.Error())
+	s.Require().Equal("fetching pool: finding pool: not found", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganizationPoolInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error parsing id: invalid request", err.Error())
+	s.Require().Equal("parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestDeleteOrganizationPoolDBDeleteErr() {
@@ -831,7 +720,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPoolDBDeleteErr() {
 	err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID)

 	s.Require().NotNil(err)
-	s.Require().Equal("error removing pool: mocked deleting pool error", err.Error())
+	s.Require().Equal("removing pool: mocked deleting pool error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -859,14 +748,14 @@ func (s *OrgTestSuite) TestListOrgInstances() {
 }

 func (s *OrgTestSuite) TestListOrgInstancesInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	_, err := s.Store.ListEntityInstances(s.adminCtx, entity)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching entity: parsing id: invalid request", err.Error())
 }

 func (s *OrgTestSuite) TestUpdateOrganizationPool() {
@@ -886,39 +775,18 @@ func (s *OrgTestSuite) TestUpdateOrganizationPool() {
 	s.Require().Equal(s.Fixtures.UpdatePoolParams.Flavor, pool.Flavor)
 }

-func (s *OrgTestSuite) TestAddOrgEntityEvent() {
-	org, err := s.Store.CreateOrganization(
-		s.adminCtx,
-		s.Fixtures.CreateOrgParams.Name,
-		s.testCreds,
-		s.Fixtures.CreateOrgParams.WebhookSecret,
-		params.PoolBalancerTypeRoundRobin)
-
-	s.Require().Nil(err)
-	entity, err := org.GetEntity()
-	s.Require().Nil(err)
-	err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20)
-	s.Require().Nil(err)
-
-	org, err = s.Store.GetOrganizationByID(s.adminCtx, org.ID)
-	s.Require().Nil(err)
-	s.Require().Equal(1, len(org.Events))
-	s.Require().Equal(params.StatusEvent, org.Events[0].EventType)
-	s.Require().Equal(params.EventInfo, org.Events[0].EventLevel)
-	s.Require().Equal("this is a test", org.Events[0].Message)
-}
-
 func (s *OrgTestSuite) TestUpdateOrganizationPoolInvalidOrgID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-org-id",
-		EntityType: params.ForgeEntityTypeOrganization,
+		EntityType: params.GithubEntityTypeOrganization,
 	}
 	_, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pool: parsing id: invalid request", err.Error())
 }

 func TestOrgTestSuite(t *testing.T) {
+	t.Parallel()
 	suite.Run(t, new(OrgTestSuite))
 }
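Note on the expected-error-string changes above: the store now wraps errors with github.com/pkg/errors instead of fmt.Errorf (see the pools.go and repositories.go hunks below), and errors.Wrap renders as "message: cause", so the redundant "error " prefixes drop out of every fixture. A minimal sketch of the two wrapping styles side by side (the cause value here is illustrative):

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

func main() {
	cause := fmt.Errorf("not found")

	// Old style: the "error " prefix was baked into every format string,
	// stacking up as "error saving org: error fetching org: ...".
	old := fmt.Errorf("error fetching org: %w", cause)
	fmt.Println(old) // error fetching org: not found

	// New style: errors.Wrap renders "message: cause", so the fixtures
	// shrink to "fetching org: not found".
	wrapped := errors.Wrap(cause, "fetching org")
	fmt.Println(wrapped) // fetching org: not found
}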
diff --git a/database/sql/pools.go b/database/sql/pools.go
index e86087ad..cd888505 100644
--- a/database/sql/pools.go
+++ b/database/sql/pools.go
@@ -16,10 +16,10 @@ package sql

 import (
 	"context"
-	"errors"
 	"fmt"

 	"github.com/google/uuid"
+	"github.com/pkg/errors"
 	"gorm.io/datatypes"
 	"gorm.io/gorm"

@@ -37,18 +37,15 @@ const (

 func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) {
 	var pools []Pool

-	q := s.conn.
+	q := s.conn.Model(&Pool{}).
 		Preload("Tags").
 		Preload("Organization").
-		Preload("Organization.Endpoint").
 		Preload("Repository").
-		Preload("Repository.Endpoint").
 		Preload("Enterprise").
-		Preload("Enterprise.Endpoint").
 		Omit("extra_specs").
 		Find(&pools)
 	if q.Error != nil {
-		return nil, fmt.Errorf("error fetching all pools: %w", q.Error)
+		return nil, errors.Wrap(q.Error, "fetching all pools")
 	}

@@ -56,34 +53,27 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) {
 	for idx, val := range pools {
 		ret[idx], err = s.sqlToCommonPool(val)
 		if err != nil {
-			return nil, fmt.Errorf("error converting pool: %w", err)
+			return nil, errors.Wrap(err, "converting pool")
 		}
 	}

 	return ret, nil
 }

 func (s *sqlDatabase) GetPoolByID(_ context.Context, poolID string) (params.Pool, error) {
-	preloadList := []string{
-		"Tags",
-		"Instances",
-		"Enterprise",
-		"Enterprise.Endpoint",
-		"Organization",
-		"Organization.Endpoint",
-		"Repository",
-		"Repository.Endpoint",
-	}
-	pool, err := s.getPoolByID(s.conn, poolID, preloadList...)
+	pool, err := s.getPoolByID(s.conn, poolID, "Tags", "Instances", "Enterprise", "Organization", "Repository")
 	if err != nil {
-		return params.Pool{}, fmt.Errorf("error fetching pool by ID: %w", err)
+		return params.Pool{}, errors.Wrap(err, "fetching pool by ID")
 	}

 	return s.sqlToCommonPool(pool)
 }

 func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	pool, err := s.getPoolByID(s.conn, poolID)
 	if err != nil {
-		return fmt.Errorf("error fetching pool by ID: %w", err)
+		return errors.Wrap(err, "fetching pool by ID")
 	}

 	defer func() {
@@ -93,34 +83,34 @@ func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err erro
 	}()

 	if q := s.conn.Unscoped().Delete(&pool); q.Error != nil {
-		return fmt.Errorf("error removing pool: %w", q.Error)
+		return errors.Wrap(q.Error, "removing pool")
 	}

 	return nil
 }

-func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityType, entityID, poolID string, preload ...string) (Pool, error) {
+func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.GithubEntityType, entityID, poolID string, preload ...string) (Pool, error) {
 	if entityID == "" {
-		return Pool{}, fmt.Errorf("error missing entity id: %w", runnerErrors.ErrBadRequest)
+		return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id")
 	}

 	u, err := uuid.Parse(poolID)
 	if err != nil {
-		return Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest)
+		return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	var fieldName string
 	var entityField string
 	switch entityType {
-	case params.ForgeEntityTypeRepository:
+	case params.GithubEntityTypeRepository:
 		fieldName = entityTypeRepoName
-		entityField = repositoryFieldName
-	case params.ForgeEntityTypeOrganization:
+		entityField = "Repository"
+	case params.GithubEntityTypeOrganization:
 		fieldName = entityTypeOrgName
-		entityField = organizationFieldName
-	case params.ForgeEntityTypeEnterprise:
+		entityField = "Organization"
+	case params.GithubEntityTypeEnterprise:
 		fieldName = entityTypeEnterpriseName
-		entityField = enterpriseFieldName
+		entityField = "Enterprise"
 	default:
 		return Pool{}, fmt.Errorf("invalid entityType: %v", entityType)
 	}
@@ -140,33 +130,33 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityTy
 		First(&pool).Error
 	if err != nil {
 		if errors.Is(err, gorm.ErrRecordNotFound) {
-			return Pool{}, fmt.Errorf("error finding pool: %w", runnerErrors.ErrNotFound)
+			return Pool{}, errors.Wrap(runnerErrors.ErrNotFound, "finding pool")
 		}
-		return Pool{}, fmt.Errorf("error fetching pool: %w", err)
+		return Pool{}, errors.Wrap(err, "fetching pool")
 	}

 	return pool, nil
 }

-func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]Pool, error) {
+func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.GithubEntityType, entityID string, preload ...string) ([]Pool, error) {
 	if _, err := uuid.Parse(entityID); err != nil {
-		return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest)
+		return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	if err := s.hasGithubEntity(tx, entityType, entityID); err != nil {
-		return nil, fmt.Errorf("error checking entity existence: %w", err)
+		return nil, errors.Wrap(err, "checking entity existence")
 	}

 	var preloadEntity string
 	var fieldName string
 	switch entityType {
-	case params.ForgeEntityTypeRepository:
+	case params.GithubEntityTypeRepository:
 		fieldName = entityTypeRepoName
 		preloadEntity = "Repository"
-	case params.ForgeEntityTypeOrganization:
+	case params.GithubEntityTypeOrganization:
 		fieldName = entityTypeOrgName
 		preloadEntity = "Organization"
-	case params.ForgeEntityTypeEnterprise:
+	case params.GithubEntityTypeEnterprise:
 		fieldName = entityTypeEnterpriseName
 		preloadEntity = "Enterprise"
 	default:
@@ -191,28 +181,28 @@ func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.ForgeEntity
 		if errors.Is(err, gorm.ErrRecordNotFound) {
 			return []Pool{}, nil
 		}
-		return nil, fmt.Errorf("error fetching pool: %w", err)
+		return nil, errors.Wrap(err, "fetching pool")
 	}

 	return pools, nil
 }

-func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType, tags []string) ([]params.Pool, error) {
+func (s *sqlDatabase) findPoolByTags(id string, poolType params.GithubEntityType, tags []string) ([]params.Pool, error) {
 	if len(tags) == 0 {
 		return nil, runnerErrors.NewBadRequestError("missing tags")
 	}

 	u, err := uuid.Parse(id)
 	if err != nil {
-		return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest)
+		return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	var fieldName string
 	switch poolType {
-	case params.ForgeEntityTypeRepository:
+	case params.GithubEntityTypeRepository:
 		fieldName = entityTypeRepoName
-	case params.ForgeEntityTypeOrganization:
+	case params.GithubEntityTypeOrganization:
 		fieldName = entityTypeOrgName
-	case params.ForgeEntityTypeEnterprise:
+	case params.GithubEntityTypeEnterprise:
 		fieldName = entityTypeEnterpriseName
 	default:
 		return nil, fmt.Errorf("invalid poolType: %v", poolType)
@@ -233,7 +223,7 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType,
 		if errors.Is(q.Error, gorm.ErrRecordNotFound) {
 			return nil, runnerErrors.ErrNotFound
 		}
-		return nil, fmt.Errorf("error fetching pool: %w", q.Error)
+		return nil, errors.Wrap(q.Error, "fetching pool")
 	}

 	if len(pools) == 0 {
@@ -244,14 +234,14 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType,
 	for idx, val := range pools {
 		ret[idx], err = s.sqlToCommonPool(val)
 		if err != nil {
-			return nil, fmt.Errorf("error converting pool: %w", err)
+			return nil, errors.Wrap(err, "converting pool")
 		}
 	}

 	return ret, nil
 }

-func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) {
+func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) {
 	if len(tags) == 0 {
 		return nil, runnerErrors.NewBadRequestError("missing tags")
 	}
@@ -261,13 +251,16 @@ func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType par
 		if errors.Is(err, runnerErrors.ErrNotFound) {
 			return []params.Pool{}, nil
 		}
-		return nil, fmt.Errorf("error fetching pools: %w", err)
+		return nil, errors.Wrap(err, "fetching pools")
 	}

 	return pools, nil
 }

-func (s *sqlDatabase) CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (pool params.Pool, err error) {
+func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.GithubEntity, param params.CreatePoolParams) (pool params.Pool, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	if len(param.Tags) == 0 {
 		return params.Pool{}, runnerErrors.NewBadRequestError("no tags specified")
 	}
@@ -298,39 +291,39 @@ func (s *sqlDatabase) CreateEntityPool(ctx context.Context, entity params.ForgeE
 	entityID, err := uuid.Parse(entity.ID)
 	if err != nil {
-		return params.Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest)
+		return params.Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	switch entity.EntityType {
-	case params.ForgeEntityTypeRepository:
+	case params.GithubEntityTypeRepository:
 		newPool.RepoID = &entityID
-	case params.ForgeEntityTypeOrganization:
+	case params.GithubEntityTypeOrganization:
 		newPool.OrgID = &entityID
-	case params.ForgeEntityTypeEnterprise:
+	case params.GithubEntityTypeEnterprise:
 		newPool.EnterpriseID = &entityID
 	}

 	err = s.conn.Transaction(func(tx *gorm.DB) error {
 		if err := s.hasGithubEntity(tx, entity.EntityType, entity.ID); err != nil {
-			return fmt.Errorf("error checking entity existence: %w", err)
+			return errors.Wrap(err, "checking entity existence")
 		}

 		tags := []Tag{}
 		for _, val := range param.Tags {
 			t, err := s.getOrCreateTag(tx, val)
 			if err != nil {
-				return fmt.Errorf("error creating tag: %w", err)
+				return errors.Wrap(err, "creating tag")
 			}
 			tags = append(tags, t)
 		}

 		q := tx.Create(&newPool)
 		if q.Error != nil {
-			return fmt.Errorf("error creating pool: %w", q.Error)
+			return errors.Wrap(q.Error, "creating pool")
 		}

 		for i := range tags {
 			if err := tx.Model(&newPool).Association("Tags").Append(&tags[i]); err != nil {
-				return fmt.Errorf("error associating tags: %w", err)
+				return errors.Wrap(err, "associating tags")
 			}
 		}
 		return nil
@@ -339,31 +332,29 @@ func (s *sqlDatabase) CreateEntityPool(ctx context.Context, entity params.ForgeE
 		return params.Pool{}, err
 	}

-	return s.GetPoolByID(ctx, newPool.ID.String())
+	dbPool, err := s.getPoolByID(s.conn, newPool.ID.String(), "Tags", "Instances", "Enterprise", "Organization", "Repository")
+	if err != nil {
+		return params.Pool{}, errors.Wrap(err, "fetching pool")
+	}
+
+	return s.sqlToCommonPool(dbPool)
 }

-func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) {
-	preloadList := []string{
-		"Tags",
-		"Instances",
-		"Enterprise",
-		"Enterprise.Endpoint",
-		"Organization",
-		"Organization.Endpoint",
-		"Repository",
-		"Repository.Endpoint",
-	}
-	pool, err := s.getEntityPool(s.conn, entity.EntityType, entity.ID, poolID, preloadList...)
+func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) {
+	pool, err := s.getEntityPool(s.conn, entity.EntityType, entity.ID, poolID, "Tags", "Instances")
 	if err != nil {
 		return params.Pool{}, fmt.Errorf("fetching pool: %w", err)
 	}
 	return s.sqlToCommonPool(pool)
 }

-func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEntity, poolID string) (err error) {
+func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEntity, poolID string) (err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	entityID, err := uuid.Parse(entity.ID)
 	if err != nil {
-		return fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest)
+		return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	defer func() {
@@ -377,27 +368,30 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEnt
 	poolUUID, err := uuid.Parse(poolID)
 	if err != nil {
-		return fmt.Errorf("error parsing pool id: %w", runnerErrors.ErrBadRequest)
+		return errors.Wrap(runnerErrors.ErrBadRequest, "parsing pool id")
 	}

 	var fieldName string
 	switch entity.EntityType {
-	case params.ForgeEntityTypeRepository:
+	case params.GithubEntityTypeRepository:
 		fieldName = entityTypeRepoName
-	case params.ForgeEntityTypeOrganization:
+	case params.GithubEntityTypeOrganization:
 		fieldName = entityTypeOrgName
-	case params.ForgeEntityTypeEnterprise:
+	case params.GithubEntityTypeEnterprise:
 		fieldName = entityTypeEnterpriseName
 	default:
 		return fmt.Errorf("invalid entityType: %v", entity.EntityType)
 	}
 	condition := fmt.Sprintf("id = ? and %s = ?", fieldName)
 	if err := s.conn.Unscoped().Where(condition, poolUUID, entityID).Delete(&Pool{}).Error; err != nil {
-		return fmt.Errorf("error removing pool: %w", err)
+		return errors.Wrap(err, "removing pool")
 	}

 	return nil
 }

-func (s *sqlDatabase) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) {
+func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.PoolEntityType, common.UpdateOperation, updatedPool)
@@ -406,57 +400,49 @@ func (s *sqlDatabase) UpdateEntityPool(ctx context.Context, entity params.ForgeE
 	err = s.conn.Transaction(func(tx *gorm.DB) error {
 		pool, err := s.getEntityPool(tx, entity.EntityType, entity.ID, poolID, "Tags", "Instances")
 		if err != nil {
-			return fmt.Errorf("error fetching pool: %w", err)
+			return errors.Wrap(err, "fetching pool")
 		}

 		updatedPool, err = s.updatePool(tx, pool, param)
 		if err != nil {
-			return fmt.Errorf("error updating pool: %w", err)
+			return errors.Wrap(err, "updating pool")
 		}
 		return nil
 	})
 	if err != nil {
 		return params.Pool{}, err
 	}
-
-	updatedPool, err = s.GetPoolByID(ctx, poolID)
-	if err != nil {
-		return params.Pool{}, err
-	}
 	return updatedPool, nil
 }

-func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.ForgeEntity) ([]params.Pool, error) {
+func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.GithubEntity) ([]params.Pool, error) {
 	pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Tags")
 	if err != nil {
-		return nil, fmt.Errorf("error fetching pools: %w", err)
+		return nil, errors.Wrap(err, "fetching pools")
 	}

 	ret := make([]params.Pool, len(pools))
 	for idx, pool := range pools {
 		ret[idx], err = s.sqlToCommonPool(pool)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching pool: %w", err)
+			return nil, errors.Wrap(err, "fetching pool")
 		}
 	}

 	return ret, nil
 }

-func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.ForgeEntity) ([]params.Instance, error) {
+func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.GithubEntity) ([]params.Instance, error) {
 	pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Instances", "Instances.Job")
 	if err != nil {
-		return nil, fmt.Errorf("error fetching entity: %w", err)
+		return nil, errors.Wrap(err, "fetching entity")
 	}

 	ret := []params.Instance{}
 	for _, pool := range pools {
-		instances := pool.Instances
-		pool.Instances = nil
-		for _, instance := range instances {
-			instance.Pool = pool
+		for _, instance := range pool.Instances {
 			paramsInstance, err := s.sqlToParamsInstance(instance)
 			if err != nil {
-				return nil, fmt.Errorf("error fetching instance: %w", err)
+				return nil, errors.Wrap(err, "fetching instance")
 			}
 			ret = append(ret, paramsInstance)
 		}
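The writeMux lock/unlock pairs introduced above in DeletePoolByID, CreateEntityPool, DeleteEntityPool and UpdateEntityPool all follow one pattern: take a store-wide mutex before any mutating query so concurrent writers are serialized, likely to avoid "database is locked" failures from SQLite's single-writer model now that the test suites run with t.Parallel(). A minimal sketch of the pattern under those assumptions (the store type below is a reduced, hypothetical stand-in for sqlDatabase):

package store

import (
	"sync"

	"gorm.io/gorm"
)

// store mirrors the relevant shape of sqlDatabase: one shared
// connection and one mutex guarding every write path.
type store struct {
	conn     *gorm.DB
	writeMux sync.Mutex
}

// deleteByID shows the shape of the guarded write paths: lock first,
// release on return, and keep both the lookup and the delete inside
// the critical section so no other writer interleaves between them.
func (s *store) deleteByID(model any, id string) error {
	s.writeMux.Lock()
	defer s.writeMux.Unlock()

	return s.conn.Unscoped().Where("id = ?", id).Delete(model).Error
}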
diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go
index 297f4cdf..e6cf7f4a 100644
--- a/database/sql/pools_test.go
+++ b/database/sql/pools_test.go
@@ -16,7 +16,6 @@ package sql

 import (
 	"context"
-	"encoding/json"
 	"flag"
 	"fmt"
 	"regexp"
@@ -28,9 +27,7 @@ import (
 	"gorm.io/gorm"
 	"gorm.io/gorm/logger"

-	commonParams "github.com/cloudbase/garm-provider-common/params"
 	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/database/watcher"
 	garmTesting "github.com/cloudbase/garm/internal/testing"
 	"github.com/cloudbase/garm/params"
 )
@@ -43,9 +40,7 @@ type PoolsTestFixtures struct {

 type PoolsTestSuite struct {
 	suite.Suite
-	Store dbCommon.Store
-	ctx   context.Context
-
+	Store          dbCommon.Store
 	StoreSQLMocked *sqlDatabase
 	Fixtures       *PoolsTestFixtures
 	adminCtx       context.Context
@@ -58,21 +53,13 @@ func (s *PoolsTestSuite) assertSQLMockExpectations() {
 	}
 }

-func (s *PoolsTestSuite) TearDownTest() {
-	watcher.CloseWatcher()
-}
-
 func (s *PoolsTestSuite) SetupTest() {
 	// create testing sqlite database
-	ctx := context.Background()
-	watcher.InitWatcher(ctx)
-
 	db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T()))
 	if err != nil {
 		s.FailNow(fmt.Sprintf("failed to create db connection: %s", err))
 	}
 	s.Store = db
-	s.ctx = garmTesting.ImpersonateAdminContext(ctx, s.Store, s.T())

 	adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T())
 	s.adminCtx = adminCtx
@@ -81,7 +68,7 @@ func (s *PoolsTestSuite) SetupTest() {
 	creds := garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint)

 	// create an organization for testing purposes
-	org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin)
+	org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin)
 	if err != nil {
 		s.FailNow(fmt.Sprintf("failed to create org: %s", err))
 	}
@@ -157,7 +144,7 @@ func (s *PoolsTestSuite) TestListAllPoolsDBFetchErr() {

 	s.assertSQLMockExpectations()
 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching all pools: mocked fetching all pools error", err.Error())
+	s.Require().Equal("fetching all pools: mocked fetching all pools error", err.Error())
 }

 func (s *PoolsTestSuite) TestGetPoolByID() {
@@ -171,7 +158,7 @@ func (s *PoolsTestSuite) TestGetPoolByIDInvalidPoolID() {
 	_, err := s.Store.GetPoolByID(s.adminCtx, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool by ID: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pool by ID: parsing id: invalid request", err.Error())
 }

 func (s *PoolsTestSuite) TestDeletePoolByID() {
@@ -179,14 +166,14 @@ func (s *PoolsTestSuite) TestDeletePoolByID() {
 	s.Require().Nil(err)

 	_, err = s.Store.GetPoolByID(s.adminCtx, s.Fixtures.Pools[0].ID)
-	s.Require().Equal("error fetching pool by ID: not found", err.Error())
+	s.Require().Equal("fetching pool by ID: not found", err.Error())
 }

 func (s *PoolsTestSuite) TestDeletePoolByIDInvalidPoolID() {
 	err := s.Store.DeletePoolByID(s.adminCtx, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool by ID: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pool by ID: parsing id: invalid request", err.Error())
 }

 func (s *PoolsTestSuite) TestDeletePoolByIDDBRemoveErr() {
@@ -204,135 +191,10 @@ func (s *PoolsTestSuite) TestDeletePoolByIDDBRemoveErr() {

 	s.assertSQLMockExpectations()
 	s.Require().NotNil(err)
-	s.Require().Equal("error removing pool: mocked removing pool error", err.Error())
-}
-
-func (s *PoolsTestSuite) TestEntityPoolOperations() {
-	ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T())
-	creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep)
-	s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) })
-	repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotEmpty(repo.ID)
-	s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) })
-
-	entity, err := repo.GetEntity()
-	s.Require().NoError(err)
-
-	createPoolParams := params.CreatePoolParams{
-		ProviderName: "test-provider",
-		Image:        "test-image",
-		Flavor:       "test-flavor",
-		OSType:       commonParams.Linux,
-		OSArch:       commonParams.Amd64,
-		Tags:         []string{"test-tag"},
-	}
-
-	pool, err := s.Store.CreateEntityPool(s.ctx, entity, createPoolParams)
-	s.Require().NoError(err)
-	s.Require().NotEmpty(pool.ID)
-	s.T().Cleanup(func() { s.Store.DeleteEntityPool(s.ctx, entity, pool.ID) })
-
-	entityPool, err := s.Store.GetEntityPool(s.ctx, entity, pool.ID)
-	s.Require().NoError(err)
-	s.Require().Equal(pool.ID, entityPool.ID)
-	s.Require().Equal(pool.ProviderName, entityPool.ProviderName)
-
-	updatePoolParams := params.UpdatePoolParams{
-		Enabled: garmTesting.Ptr(true),
-		Flavor:  "new-flavor",
-		Image:   "new-image",
-		RunnerPrefix: params.RunnerPrefix{
-			Prefix: "new-prefix",
-		},
-		MaxRunners:             garmTesting.Ptr(uint(100)),
-		MinIdleRunners:         garmTesting.Ptr(uint(50)),
-		OSType:                 commonParams.Windows,
-		OSArch:                 commonParams.Amd64,
-		Tags:                   []string{"new-tag"},
-		RunnerBootstrapTimeout: garmTesting.Ptr(uint(10)),
-		ExtraSpecs:             json.RawMessage(`{"extra": "specs"}`),
-		GitHubRunnerGroup:      garmTesting.Ptr("new-group"),
-		Priority:               garmTesting.Ptr(uint(1)),
-	}
-	pool, err = s.Store.UpdateEntityPool(s.ctx, entity, pool.ID, updatePoolParams)
-	s.Require().NoError(err)
-	s.Require().Equal(*updatePoolParams.Enabled, pool.Enabled)
-	s.Require().Equal(updatePoolParams.Flavor, pool.Flavor)
-	s.Require().Equal(updatePoolParams.Image, pool.Image)
-	s.Require().Equal(updatePoolParams.RunnerPrefix.Prefix, pool.RunnerPrefix.Prefix)
-	s.Require().Equal(*updatePoolParams.MaxRunners, pool.MaxRunners)
-	s.Require().Equal(*updatePoolParams.MinIdleRunners, pool.MinIdleRunners)
-	s.Require().Equal(updatePoolParams.OSType, pool.OSType)
-	s.Require().Equal(updatePoolParams.OSArch, pool.OSArch)
-	s.Require().Equal(*updatePoolParams.RunnerBootstrapTimeout, pool.RunnerBootstrapTimeout)
-	s.Require().Equal(updatePoolParams.ExtraSpecs, pool.ExtraSpecs)
-	s.Require().Equal(*updatePoolParams.GitHubRunnerGroup, pool.GitHubRunnerGroup)
-	s.Require().Equal(*updatePoolParams.Priority, pool.Priority)
-
-	entityPools, err := s.Store.ListEntityPools(s.ctx, entity)
-	s.Require().NoError(err)
-	s.Require().Len(entityPools, 1)
-	s.Require().Equal(pool.ID, entityPools[0].ID)
-
-	tagsToMatch := []string{"new-tag"}
-	pools, err := s.Store.FindPoolsMatchingAllTags(s.ctx, entity.EntityType, entity.ID, tagsToMatch)
-	s.Require().NoError(err)
-	s.Require().Len(pools, 1)
-	s.Require().Equal(pool.ID, pools[0].ID)
-
-	invalidTagsToMatch := []string{"invalid-tag"}
-	pools, err = s.Store.FindPoolsMatchingAllTags(s.ctx, entity.EntityType, entity.ID, invalidTagsToMatch)
-	s.Require().NoError(err)
-	s.Require().Len(pools, 0)
-}
-
-func (s *PoolsTestSuite) TestListEntityInstances() {
-	ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T())
-	creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep)
-	s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) })
-	repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin)
-	s.Require().NoError(err)
-	s.Require().NotEmpty(repo.ID)
-	s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) })
-
-	entity, err := repo.GetEntity()
-	s.Require().NoError(err)
-
-	createPoolParams := params.CreatePoolParams{
-		ProviderName: "test-provider",
-		Image:        "test-image",
-		Flavor:       "test-flavor",
-		OSType:       commonParams.Linux,
-		OSArch:       commonParams.Amd64,
-		Tags:         []string{"test-tag"},
-	}
-
-	pool, err := s.Store.CreateEntityPool(s.ctx, entity, createPoolParams)
-	s.Require().NoError(err)
-	s.Require().NotEmpty(pool.ID)
-	s.T().Cleanup(func() { s.Store.DeleteEntityPool(s.ctx, entity, pool.ID) })
-
-	createInstanceParams := params.CreateInstanceParams{
-		Name:   "test-instance",
-		OSType: commonParams.Linux,
-		OSArch: commonParams.Amd64,
-		Status: commonParams.InstanceCreating,
-	}
-	instance, err := s.Store.CreateInstance(s.ctx, pool.ID, createInstanceParams)
-	s.Require().NoError(err)
-	s.Require().NotEmpty(instance.ID)
-
-	s.T().Cleanup(func() { s.Store.DeleteInstance(s.ctx, pool.ID, instance.ID) })
-
-	instances, err := s.Store.ListEntityInstances(s.ctx, entity)
-	s.Require().NoError(err)
-	s.Require().Len(instances, 1)
-	s.Require().Equal(instance.ID, instances[0].ID)
-	s.Require().Equal(instance.Name, instances[0].Name)
-	s.Require().Equal(instance.ProviderName, pool.ProviderName)
+	s.Require().Equal("removing pool: mocked removing pool error", err.Error())
 }

 func TestPoolsTestSuite(t *testing.T) {
+	t.Parallel()
 	suite.Run(t, new(PoolsTestSuite))
 }
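The t.Parallel() calls added to the suite entry points (here and in TestOrgTestSuite above) only parallelize at the suite level: the top-level test is marked parallel-safe and can run alongside the package's other parallel tests, while testify still executes the methods inside a single suite sequentially. A minimal sketch of the shape, using a hypothetical suite:

package sql

import (
	"testing"

	"github.com/stretchr/testify/suite"
)

type exampleSuite struct {
	suite.Suite
}

func (s *exampleSuite) TestSomething() {
	s.Require().True(true)
}

// t.Parallel() lets this suite run concurrently with the other
// parallel suites in the package; the methods within the suite
// still run one after another.
func TestExampleSuite(t *testing.T) {
	t.Parallel()
	suite.Run(t, new(exampleSuite))
}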
diff --git a/database/sql/repositories.go b/database/sql/repositories.go
index 72b535e8..d6cefc64 100644
--- a/database/sql/repositories.go
+++ b/database/sql/repositories.go
@@ -16,11 +16,11 @@ package sql

 import (
 	"context"
-	"errors"
 	"fmt"
 	"log/slog"

 	"github.com/google/uuid"
+	"github.com/pkg/errors"
 	"gorm.io/gorm"

 	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
@@ -29,7 +29,10 @@ import (
 	"github.com/cloudbase/garm/params"
 )

-func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) {
+func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.RepositoryEntityType, common.CreateOperation, param)
@@ -51,68 +54,62 @@ func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string,
 		PoolBalancerType: poolBalancerType,
 	}
 	err = s.conn.Transaction(func(tx *gorm.DB) error {
-		switch credentials.ForgeType {
-		case params.GithubEndpointType:
-			newRepo.CredentialsID = &credentials.ID
-		case params.GiteaEndpointType:
-			newRepo.GiteaCredentialsID = &credentials.ID
-		default:
-			return runnerErrors.NewBadRequestError("unsupported credentials type")
+		creds, err := s.getGithubCredentialsByName(ctx, tx, credentialsName, false)
+		if err != nil {
+			return errors.Wrap(err, "creating repository")
 		}
+		if creds.EndpointName == nil {
+			return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint")
+		}
+		newRepo.CredentialsID = &creds.ID
+		newRepo.CredentialsName = creds.Name
+		newRepo.EndpointName = creds.EndpointName

-		newRepo.EndpointName = &credentials.Endpoint.Name
 		q := tx.Create(&newRepo)
 		if q.Error != nil {
-			return fmt.Errorf("error creating repository: %w", q.Error)
+			return errors.Wrap(q.Error, "creating repository")
 		}
+
+		newRepo.Credentials = creds
+		newRepo.Endpoint = creds.Endpoint
+
 		return nil
 	})
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error creating repository: %w", err)
+		return params.Repository{}, errors.Wrap(err, "creating repository")
 	}

-	ret, err := s.GetRepositoryByID(ctx, newRepo.ID.String())
+	param, err = s.sqlToCommonRepository(newRepo, true)
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error creating repository: %w", err)
-	}
-
-	return ret, nil
-}
-
-func (s *sqlDatabase) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) {
-	repo, err := s.getRepo(ctx, owner, name, endpointName)
-	if err != nil {
-		return params.Repository{}, fmt.Errorf("error fetching repo: %w", err)
-	}
-
-	param, err := s.sqlToCommonRepository(repo, true)
-	if err != nil {
-		return params.Repository{}, fmt.Errorf("error fetching repo: %w", err)
+		return params.Repository{}, errors.Wrap(err, "creating repository")
 	}

 	return param, nil
 }

-func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.RepositoryFilter) ([]params.Repository, error) {
+func (s *sqlDatabase) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) {
+	repo, err := s.getRepo(ctx, owner, name, endpointName)
+	if err != nil {
+		return params.Repository{}, errors.Wrap(err, "fetching repo")
+	}
+
+	param, err := s.sqlToCommonRepository(repo, true)
+	if err != nil {
+		return params.Repository{}, errors.Wrap(err, "fetching repo")
+	}
+
+	return param, nil
+}
+
+func (s *sqlDatabase) ListRepositories(_ context.Context) ([]params.Repository, error) {
 	var repos []Repository
 	q := s.conn.
 		Preload("Credentials").
-		Preload("GiteaCredentials").
 		Preload("Credentials.Endpoint").
-		Preload("GiteaCredentials.Endpoint").
-		Preload("Endpoint")
-	if filter.Owner != "" {
-		q = q.Where("owner = ?", filter.Owner)
-	}
-	if filter.Name != "" {
-		q = q.Where("name = ?", filter.Name)
-	}
-	if filter.Endpoint != "" {
-		q = q.Where("endpoint_name = ?", filter.Endpoint)
-	}
-	q = q.Find(&repos)
+		Preload("Endpoint").
+		Find(&repos)
 	if q.Error != nil {
-		return []params.Repository{}, fmt.Errorf("error fetching user from database: %w", q.Error)
+		return []params.Repository{}, errors.Wrap(q.Error, "fetching user from database")
 	}

 	ret := make([]params.Repository, len(repos))
@@ -120,7 +117,7 @@ func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.Reposito
 		var err error
 		ret[idx], err = s.sqlToCommonRepository(val, true)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching repositories: %w", err)
+			return nil, errors.Wrap(err, "fetching repositories")
 		}
 	}

@@ -128,9 +125,12 @@ func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.Reposito
 }

 func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err error) {
-	repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint")
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
+	repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint")
 	if err != nil {
-		return fmt.Errorf("error fetching repo: %w", err)
+		return errors.Wrap(err, "fetching repo")
 	}

 	defer func(repo Repository) {
@@ -146,13 +146,16 @@ func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err

 	q := s.conn.Unscoped().Delete(&repo)
 	if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) {
-		return fmt.Errorf("error deleting repo: %w", q.Error)
+		return errors.Wrap(q.Error, "deleting repo")
 	}

 	return nil
 }

 func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (newParams params.Repository, err error) {
+	s.writeMux.Lock()
+	defer s.writeMux.Unlock()
+
 	defer func() {
 		if err == nil {
 			s.sendNotify(common.RepositoryEntityType, common.UpdateOperation, newParams)
@@ -164,23 +167,24 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param
 		var err error
 		repo, err = s.getRepoByID(ctx, tx, repoID)
 		if err != nil {
-			return fmt.Errorf("error fetching repo: %w", err)
+			return errors.Wrap(err, "fetching repo")
 		}

 		if repo.EndpointName == nil {
-			return runnerErrors.NewUnprocessableError("repository has no endpoint")
+			return errors.Wrap(runnerErrors.ErrUnprocessable, "repository has no endpoint")
 		}

 		if param.CredentialsName != "" {
+			repo.CredentialsName = param.CredentialsName
 			creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false)
 			if err != nil {
-				return fmt.Errorf("error fetching credentials: %w", err)
+				return errors.Wrap(err, "fetching credentials")
 			}
 			if creds.EndpointName == nil {
-				return runnerErrors.NewUnprocessableError("credentials have no endpoint")
+				return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint")
 			}

 			if *creds.EndpointName != *repo.EndpointName {
-				return runnerErrors.NewBadRequestError("endpoint mismatch")
+				return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch")
 			}
 			repo.CredentialsID = &creds.ID
 		}
@@ -199,45 +203,36 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param
 		q := tx.Save(&repo)
 		if q.Error != nil {
-			return fmt.Errorf("error saving repo: %w", q.Error)
+			return errors.Wrap(q.Error, "saving repo")
 		}

 		return nil
 	})
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error saving repo: %w", err)
+		return params.Repository{}, errors.Wrap(err, "saving repo")
 	}

-	repo, err = s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint")
+	repo, err = s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint")
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error updating enterprise: %w", err)
+		return params.Repository{}, errors.Wrap(err, "updating enterprise")
 	}

 	newParams, err = s.sqlToCommonRepository(repo, true)
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error saving repo: %w", err)
+		return params.Repository{}, errors.Wrap(err, "saving repo")
 	}
 	return newParams, nil
 }

 func (s *sqlDatabase) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) {
-	preloadList := []string{
-		"Pools",
-		"Credentials",
-		"Endpoint",
-		"Credentials.Endpoint",
-		"GiteaCredentials",
-		"GiteaCredentials.Endpoint",
-		"Events",
-	}
-	repo, err := s.getRepoByID(ctx, s.conn, repoID, preloadList...)
+	repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint")
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error fetching repo: %w", err)
+		return params.Repository{}, errors.Wrap(err, "fetching repo")
 	}

 	param, err := s.sqlToCommonRepository(repo, true)
 	if err != nil {
-		return params.Repository{}, fmt.Errorf("error fetching repo: %w", err)
+		return params.Repository{}, errors.Wrap(err, "fetching repo")
 	}
 	return param, nil
 }
@@ -248,8 +243,6 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin
 	q := s.conn.Where("name = ? COLLATE NOCASE and owner = ? COLLATE NOCASE and endpoint_name = ? COLLATE NOCASE", name, owner, endpointName).
 		Preload("Credentials").
 		Preload("Credentials.Endpoint").
-		Preload("GiteaCredentials").
-		Preload("GiteaCredentials.Endpoint").
 		Preload("Endpoint").
 		First(&repo)
@@ -259,7 +252,7 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin
 		if errors.Is(q.Error, gorm.ErrRecordNotFound) {
 			return Repository{}, runnerErrors.ErrNotFound
 		}
-		return Repository{}, fmt.Errorf("error fetching repository from database: %w", q.Error)
+		return Repository{}, errors.Wrap(q.Error, "fetching repository from database")
 	}
 	return repo, nil
 }
@@ -267,7 +260,7 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin
 func (s *sqlDatabase) getRepoByID(_ context.Context, tx *gorm.DB, id string, preload ...string) (Repository, error) {
 	u, err := uuid.Parse(id)
 	if err != nil {
-		return Repository{}, runnerErrors.NewBadRequestError("error parsing id: %s", err)
+		return Repository{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
 	}

 	var repo Repository
@@ -283,7 +276,7 @@ func (s *sqlDatabase) getRepoByID(_ context.Context, tx *gorm.DB, id string, pre
 		if errors.Is(q.Error, gorm.ErrRecordNotFound) {
 			return Repository{}, runnerErrors.ErrNotFound
 		}
-		return Repository{}, fmt.Errorf("error fetching repository from database: %w", q.Error)
+		return Repository{}, errors.Wrap(q.Error, "fetching repository from database")
 	}
 	return repo, nil
 }
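Swapping the standard library errors import for github.com/pkg/errors leaves the remaining errors.Is checks in this file intact: since v0.9.0, pkg/errors re-exports Is, As and Unwrap delegating to the standard library, and the values returned by Wrap implement Unwrap, so sentinel comparisons still see through the added context. A small sketch (the sentinel name is illustrative, standing in for gorm.ErrRecordNotFound or runnerErrors.ErrBadRequest):

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

var errNotFound = errors.New("not found")

func main() {
	wrapped := errors.Wrap(errNotFound, "fetching repository from database")

	// pkg/errors.Is delegates to the stdlib implementation, and the
	// wrapper returned by Wrap implements Unwrap, so the sentinel is
	// still detected behind the context message.
	fmt.Println(errors.Is(wrapped, errNotFound)) // true
}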
diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go
index b3c15eca..88fb577a 100644
--- a/database/sql/repositories_test.go
+++ b/database/sql/repositories_test.go
@@ -58,11 +58,9 @@ type RepoTestSuite struct {
 	adminCtx    context.Context
 	adminUserID string

-	testCreds          params.ForgeCredentials
-	testCredsGitea     params.ForgeCredentials
-	secondaryTestCreds params.ForgeCredentials
-	githubEndpoint     params.ForgeEndpoint
-	giteaEndpoint      params.ForgeEndpoint
+	testCreds          params.GithubCredentials
+	secondaryTestCreds params.GithubCredentials
+	githubEndpoint     params.GithubEndpoint
 }

 func (s *RepoTestSuite) equalReposByName(expected, actual []params.Repository) {
@@ -96,9 +94,6 @@ func (s *RepoTestSuite) assertSQLMockExpectations() {

 func (s *RepoTestSuite) SetupTest() {
 	// create testing sqlite database
-	ctx := context.Background()
-	watcher.InitWatcher(ctx)
-
 	db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T()))
 	if err != nil {
 		s.FailNow(fmt.Sprintf("failed to create db connection: %s", err))
@@ -111,9 +106,7 @@ func (s *RepoTestSuite) SetupTest() {
 	s.Require().NotEmpty(s.adminUserID)

 	s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T())
-	s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T())
 	s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint)
-	s.testCredsGitea = garmTesting.CreateTestGiteaCredentials(adminCtx, "new-creds", db, s.T(), s.giteaEndpoint)
 	s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint)

 	// create some repository objects in the database, for testing purposes
@@ -123,7 +116,7 @@ func (s *RepoTestSuite) SetupTest() {
 			adminCtx,
 			fmt.Sprintf("test-owner-%d", i),
 			fmt.Sprintf("test-repo-%d", i),
-			s.testCreds,
+			s.testCreds.Name,
 			fmt.Sprintf("test-webhook-secret-%d", i),
 			params.PoolBalancerTypeRoundRobin,
 		)
@@ -198,17 +191,13 @@ func (s *RepoTestSuite) SetupTest() {
 	s.Fixtures = fixtures
 }

-func (s *RepoTestSuite) TearDownTest() {
-	watcher.CloseWatcher()
-}
-
 func (s *RepoTestSuite) TestCreateRepository() {
 	// call tested function
 	repo, err := s.Store.CreateRepository(
 		s.adminCtx,
 		s.Fixtures.CreateRepoParams.Owner,
 		s.Fixtures.CreateRepoParams.Name,
-		s.testCreds,
+		s.Fixtures.CreateRepoParams.CredentialsName,
 		s.Fixtures.CreateRepoParams.WebhookSecret,
 		params.PoolBalancerTypeRoundRobin,
 	)
@@ -223,68 +212,6 @@ func (s *RepoTestSuite) TestCreateRepository() {
 	s.Require().Equal(storeRepo.Name, repo.Name)
 	s.Require().Equal(storeRepo.Credentials.Name, repo.Credentials.Name)
 	s.Require().Equal(storeRepo.WebhookSecret, repo.WebhookSecret)
-
-	entity, err := repo.GetEntity()
-	s.Require().Nil(err)
-	s.Require().Equal(s.Fixtures.CreateRepoParams.Owner, entity.Owner)
-	s.Require().Equal(entity.EntityType, params.ForgeEntityTypeRepository)
-
-	forgeType, err := entity.GetForgeType()
-	s.Require().Nil(err)
-	s.Require().Equal(forgeType, params.GithubEndpointType)
-}
-
-func (s *RepoTestSuite) TestCreateRepositoryGitea() {
-	// call tested function
-	repo, err := s.Store.CreateRepository(
-		s.adminCtx,
-		s.Fixtures.CreateRepoParams.Owner,
-		s.Fixtures.CreateRepoParams.Name,
-		s.testCredsGitea,
-		s.Fixtures.CreateRepoParams.WebhookSecret,
-		params.PoolBalancerTypeRoundRobin,
-	)
-
-	// assertions
-	s.Require().Nil(err)
-	storeRepo, err := s.Store.GetRepositoryByID(s.adminCtx, repo.ID)
-	if err != nil {
-		s.FailNow(fmt.Sprintf("failed to get repository by id: %v", err))
-	}
-	s.Require().Equal(storeRepo.Owner, repo.Owner)
-	s.Require().Equal(storeRepo.Name, repo.Name)
-	s.Require().Equal(storeRepo.Credentials.Name, repo.Credentials.Name)
-	s.Require().Equal(storeRepo.WebhookSecret, repo.WebhookSecret)
-
-	entity, err := repo.GetEntity()
-	s.Require().Nil(err)
-	s.Require().Equal(repo.ID, entity.ID)
-	s.Require().Equal(entity.EntityType, params.ForgeEntityTypeRepository)
-
-	forgeType, err := entity.GetForgeType()
-	s.Require().Nil(err)
-	s.Require().Equal(forgeType, params.GiteaEndpointType)
-}
-
-func (s *RepoTestSuite) TestCreateRepositoryInvalidForgeType() {
-	// call tested function
-	_, err := s.Store.CreateRepository(
-		s.adminCtx,
-		s.Fixtures.CreateRepoParams.Owner,
-		s.Fixtures.CreateRepoParams.Name,
-		params.ForgeCredentials{
-			Name:      "test-creds",
-			ForgeType: "invalid-forge-type",
-			Endpoint: params.ForgeEndpoint{
-				Name: "test-endpoint",
-			},
-		},
-		s.Fixtures.CreateRepoParams.WebhookSecret,
-		params.PoolBalancerTypeRoundRobin,
-	)
-
-	s.Require().NotNil(err)
-	s.Require().Equal("error creating repository: unsupported credentials type", err.Error())
 }

 func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() {
@@ -304,7 +231,7 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() {
 		s.adminCtx,
 		s.Fixtures.CreateRepoParams.Owner,
 		s.Fixtures.CreateRepoParams.Name,
-		s.testCreds,
+		s.Fixtures.CreateRepoParams.CredentialsName,
 		s.Fixtures.CreateRepoParams.WebhookSecret,
 		params.PoolBalancerTypeRoundRobin,
 	)
@@ -315,6 +242,15 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBCreateErr() {
 	s.Fixtures.SQLMock.ExpectBegin()
+	s.Fixtures.SQLMock.
+		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")).
+		WithArgs(s.adminUserID, s.Fixtures.Repos[0].CredentialsName, 1).
+		WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}).
+			AddRow(s.testCreds.ID, s.githubEndpoint.Name))
+	s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")).
+		WithArgs(s.testCreds.Endpoint.Name).
+		WillReturnRows(sqlmock.NewRows([]string{"name"}).
+			AddRow(s.githubEndpoint.Name))
 	s.Fixtures.SQLMock.
 		ExpectExec(regexp.QuoteMeta("INSERT INTO `repositories`")).
 		WillReturnError(fmt.Errorf("creating repo mock error"))
@@ -324,13 +260,13 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBCreateErr() {
 		s.adminCtx,
 		s.Fixtures.CreateRepoParams.Owner,
 		s.Fixtures.CreateRepoParams.Name,
-		s.testCreds,
+		s.Fixtures.CreateRepoParams.CredentialsName,
 		s.Fixtures.CreateRepoParams.WebhookSecret,
 		params.PoolBalancerTypeRoundRobin,
 	)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating repository: error creating repository: creating repo mock error", err.Error())
+	s.Require().Equal("creating repository: creating repository: creating repo mock error", err.Error())
 	s.assertSQLMockExpectations()
 }
@@ -355,7 +291,7 @@ func (s *RepoTestSuite) TestGetRepositoryNotFound() {
 	_, err := s.Store.GetRepository(s.adminCtx, "dummy-owner", "dummy-name", "github.com")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: not found", err.Error())
+	s.Require().Equal("fetching repo: not found", err.Error())
 }

 func (s *RepoTestSuite) TestGetRepositoryDBDecryptingErr() {
@@ -371,107 +307,26 @@ func (s *RepoTestSuite) TestGetRepositoryDBDecryptingErr() {
 	_, err := s.StoreSQLMocked.GetRepository(s.adminCtx, s.Fixtures.Repos[0].Owner, s.Fixtures.Repos[0].Name, s.Fixtures.Repos[0].Endpoint.Name)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: missing secret", err.Error())
+	s.Require().Equal("fetching repo: missing secret", err.Error())
 	s.assertSQLMockExpectations()
 }

 func (s *RepoTestSuite) TestListRepositories() {
-	repos, err := s.Store.ListRepositories(s.adminCtx, params.RepositoryFilter{})
+	repos, err := s.Store.ListRepositories(s.adminCtx)

 	s.Require().Nil(err)
 	s.equalReposByName(s.Fixtures.Repos, repos)
 }

-func (s *RepoTestSuite) TestListRepositoriesWithFilters() {
-	repo, err := s.Store.CreateRepository(
-		s.adminCtx,
-		"test-owner",
-		"test-repo",
-		s.testCreds,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	repo2, err := s.Store.CreateRepository(
-		s.adminCtx,
-		"test-owner",
-		"test-repo",
-		s.testCredsGitea,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	repo3, err := s.Store.CreateRepository(
-		s.adminCtx,
-		"test-owner",
-		"test-repo2",
-		s.testCreds,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	repo4, err := s.Store.CreateRepository(
-		s.adminCtx,
-		"test-owner2",
-		"test-repo",
-		s.testCreds,
-		"super secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	s.Require().NoError(err)
-
-	repos, err := s.Store.ListRepositories(
-		s.adminCtx,
-		params.RepositoryFilter{
-			Name: "test-repo",
-		})
-
-	s.Require().Nil(err)
-	s.equalReposByName([]params.Repository{repo, repo2, repo4}, repos)
-
-	repos, err = s.Store.ListRepositories(
-		s.adminCtx,
-		params.RepositoryFilter{
-			Name:  "test-repo",
-			Owner: "test-owner",
-		})
-
-	s.Require().Nil(err)
-	s.equalReposByName([]params.Repository{repo, repo2}, repos)
-
-	repos, err = s.Store.ListRepositories(
-		s.adminCtx,
-		params.RepositoryFilter{
-			Name:     "test-repo",
-			Owner:    "test-owner",
-			Endpoint: s.giteaEndpoint.Name,
-		})
-
-	s.Require().Nil(err)
-	s.equalReposByName([]params.Repository{repo2}, repos)
-
-	repos, err = s.Store.ListRepositories(
-		s.adminCtx,
-		params.RepositoryFilter{
-			Name: "test-repo2",
-		})
-
-	s.Require().Nil(err)
-	s.equalReposByName([]params.Repository{repo3}, repos)
-}
-
 func (s *RepoTestSuite) TestListRepositoriesDBFetchErr() {
 	s.Fixtures.SQLMock.
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE `repositories`.`deleted_at` IS NULL")).
 		WillReturnError(fmt.Errorf("fetching user from database mock error"))

-	_, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{})
+	_, err := s.StoreSQLMocked.ListRepositories(s.adminCtx)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching user from database: fetching user from database mock error", err.Error())
+	s.Require().Equal("fetching user from database: fetching user from database mock error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -482,10 +337,10 @@ func (s *RepoTestSuite) TestListRepositoriesDBDecryptingErr() {
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE `repositories`.`deleted_at` IS NULL")).
 		WillReturnRows(sqlmock.NewRows([]string{"id", "webhook_secret"}).AddRow(s.Fixtures.Repos[0].ID, s.Fixtures.Repos[0].WebhookSecret))

-	_, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{})
+	_, err := s.StoreSQLMocked.ListRepositories(s.adminCtx)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repositories: error decrypting secret: invalid passphrase length (expected length 32 characters)", err.Error())
+	s.Require().Equal("fetching repositories: decrypting secret: invalid passphrase length (expected length 32 characters)", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -495,14 +350,14 @@ func (s *RepoTestSuite) TestDeleteRepository() {
 	s.Require().Nil(err)
 	_, err = s.Store.GetRepositoryByID(s.adminCtx, s.Fixtures.Repos[0].ID)
 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: not found", err.Error())
+	s.Require().Equal("fetching repo: not found", err.Error())
 }

 func (s *RepoTestSuite) TestDeleteRepositoryInvalidRepoID() {
 	err := s.Store.DeleteRepository(s.adminCtx, "dummy-repo-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: error parsing id: invalid UUID length: 13", err.Error())
+	s.Require().Equal("fetching repo: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestDeleteRepositoryDBRemoveErr() {
@@ -520,7 +375,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryDBRemoveErr() {
 	err := s.StoreSQLMocked.DeleteRepository(s.adminCtx, s.Fixtures.Repos[0].ID)

 	s.Require().NotNil(err)
-	s.Require().Equal("error deleting repo: mocked deleting repo error", err.Error())
+	s.Require().Equal("deleting repo: mocked deleting repo error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -536,7 +391,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryInvalidRepoID() {
 	_, err := s.Store.UpdateRepository(s.adminCtx, "dummy-repo-id", s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving repo: error fetching repo: error parsing id: invalid UUID length: 13", err.Error())
+	s.Require().Equal("saving repo: fetching repo: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestUpdateRepositoryDBEncryptErr() {
@@ -561,7 +416,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBEncryptErr() {
 	_, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
+	s.Require().Equal("saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -589,7 +444,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBSaveErr() {
 	_, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving repo: error saving repo: saving repo mock error", err.Error())
+	s.Require().Equal("saving repo: saving repo: saving repo mock error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -616,7 +471,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBDecryptingErr() {
 	_, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
+	s.Require().Equal("saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -631,7 +486,7 @@ func (s *RepoTestSuite) TestGetRepositoryByIDInvalidRepoID() {
 	_, err := s.Store.GetRepositoryByID(s.adminCtx, "dummy-repo-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: error parsing id: invalid UUID length: 13", err.Error())
+	s.Require().Equal("fetching repo: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() {
@@ -639,10 +494,6 @@ func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() {
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE id = ? AND `repositories`.`deleted_at` IS NULL ORDER BY `repositories`.`id` LIMIT ?")).
 		WithArgs(s.Fixtures.Repos[0].ID, 1).
 		WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Repos[0].ID))
-	s.Fixtures.SQLMock.
-		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repository_events` WHERE `repository_events`.`repo_id` = ? AND `repository_events`.`deleted_at` IS NULL")).
-		WithArgs(s.Fixtures.Repos[0].ID).
-		WillReturnRows(sqlmock.NewRows([]string{"repo_id"}).AddRow(s.Fixtures.Repos[0].ID))
 	s.Fixtures.SQLMock.
 		ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`repo_id` = ? AND `pools`.`deleted_at` IS NULL")).
 		WithArgs(s.Fixtures.Repos[0].ID).
@@ -651,7 +502,7 @@ func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() {
 	_, err := s.StoreSQLMocked.GetRepositoryByID(s.adminCtx, s.Fixtures.Repos[0].ID)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching repo: missing secret", err.Error())
+	s.Require().Equal("fetching repo: missing secret", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -683,14 +534,14 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolMissingTags() {
 }

 func (s *RepoTestSuite) TestCreateRepositoryPoolInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	_, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error parsing id: invalid request", err.Error())
+	s.Require().Equal("parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchTagErr() {
@@ -709,7 +560,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchTagErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error())
+	s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -738,7 +589,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBAddingPoolErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error creating pool: mocked adding pool error", err.Error())
+	s.Require().Equal("creating pool: mocked adding pool error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -769,7 +620,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBSaveTagErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error associating tags: mocked saving tag error", err.Error())
+	s.Require().Equal("associating tags: mocked saving tag error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -810,7 +661,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchPoolErr() {
 	_, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool by ID: not found", err.Error())
+	s.Require().Equal("fetching pool: not found", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -834,14 +685,14 @@ func (s *RepoTestSuite) TestListRepoPools() {
 }

 func (s *RepoTestSuite) TestListRepoPoolsInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	_, err := s.Store.ListEntityPools(s.adminCtx, entity)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pools: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pools: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestGetRepositoryPool() {
@@ -859,14 +710,14 @@ func (s *RepoTestSuite) TestGetRepositoryPool() {
 }

 func (s *RepoTestSuite) TestGetRepositoryPoolInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	_, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching pool: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestDeleteRepositoryPool() {
@@ -881,18 +732,18 @@ func (s *RepoTestSuite) TestDeleteRepositoryPool() {
 	s.Require().Nil(err)

 	_, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID)
-	s.Require().Equal("fetching pool: error finding pool: not found", err.Error())
+	s.Require().Equal("fetching pool: finding pool: not found", err.Error())
 }

 func (s *RepoTestSuite) TestDeleteRepositoryPoolInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id")

 	s.Require().NotNil(err)
-	s.Require().Equal("error parsing id: invalid request", err.Error())
+	s.Require().Equal("parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestDeleteRepositoryPoolDBDeleteErr() {
@@ -913,7 +764,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPoolDBDeleteErr() {
 	err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID)

 	s.Require().NotNil(err)
-	s.Require().Equal("error removing pool: mocked deleting pool error", err.Error())
+	s.Require().Equal("removing pool: mocked deleting pool error", err.Error())
 	s.assertSQLMockExpectations()
 }

@@ -941,14 +792,14 @@ func (s *RepoTestSuite) TestListRepoInstances() {
 }

 func (s *RepoTestSuite) TestListRepoInstancesInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	_, err := s.Store.ListEntityInstances(s.adminCtx, entity)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error())
+	s.Require().Equal("fetching entity: parsing id: invalid request", err.Error())
 }

 func (s *RepoTestSuite) TestUpdateRepositoryPool() {
@@ -969,39 +820,18 @@ func (s *RepoTestSuite) TestUpdateRepositoryPool() {
 }

 func (s *RepoTestSuite) TestUpdateRepositoryPoolInvalidRepoID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         "dummy-repo-id",
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	_, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-repo-id", s.Fixtures.UpdatePoolParams)

 	s.Require().NotNil(err)
-	s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error())
-}
-
-func (s *RepoTestSuite) TestAddRepoEntityEvent() {
-	repo, err := s.Store.CreateRepository(
-		s.adminCtx,
-		s.Fixtures.CreateRepoParams.Owner,
-		s.Fixtures.CreateRepoParams.Name,
-		s.testCreds,
-		s.Fixtures.CreateRepoParams.WebhookSecret,
-		params.PoolBalancerTypeRoundRobin)
-
-	s.Require().Nil(err)
-	entity, err := repo.GetEntity()
-	s.Require().Nil(err)
-	err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20)
-	s.Require().Nil(err)
-
-	repo, err = s.Store.GetRepositoryByID(s.adminCtx, repo.ID)
-	s.Require().Nil(err)
-	s.Require().Equal(1, len(repo.Events))
-	s.Require().Equal(params.StatusEvent, repo.Events[0].EventType)
-	s.Require().Equal(params.EventInfo, repo.Events[0].EventLevel)
-	s.Require().Equal("this is a test",
repo.Events[0].Message) + s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } func TestRepoTestSuite(t *testing.T) { + t.Parallel() + suite.Run(t, new(RepoTestSuite)) } diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go deleted file mode 100644 index 457c99b5..00000000 --- a/database/sql/scaleset_instances.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package sql - -import ( - "context" - "fmt" - - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" -) - -func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) { - scaleSet, err := s.getScaleSetByID(s.conn, scaleSetID) - if err != nil { - return params.Instance{}, fmt.Errorf("error fetching scale set: %w", err) - } - - defer func() { - if err == nil { - s.sendNotify(common.InstanceEntityType, common.CreateOperation, instance) - } - }() - - var secret []byte - if len(param.JitConfiguration) > 0 { - secret, err = s.marshalAndSeal(param.JitConfiguration) - if err != nil { - return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) - } - } - - newInstance := Instance{ - ScaleSet: scaleSet, - Name: param.Name, - Status: param.Status, - RunnerStatus: param.RunnerStatus, - OSType: param.OSType, - OSArch: param.OSArch, - CallbackURL: param.CallbackURL, - MetadataURL: param.MetadataURL, - GitHubRunnerGroup: param.GitHubRunnerGroup, - JitConfiguration: secret, - AgentID: param.AgentID, - } - q := s.conn.Create(&newInstance) - if q.Error != nil { - return params.Instance{}, fmt.Errorf("error creating instance: %w", q.Error) - } - - return s.sqlToParamsInstance(newInstance) -} - -func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { - var instances []Instance - query := s.conn. - Preload("ScaleSet"). - Preload("Job"). - Where("scale_set_fk_id = ?", scalesetID) - - if err := query.Find(&instances); err.Error != nil { - return nil, fmt.Errorf("error fetching instances: %w", err.Error) - } - - var err error - ret := make([]params.Instance, len(instances)) - for idx, inst := range instances { - ret[idx], err = s.sqlToParamsInstance(inst) - if err != nil { - return nil, fmt.Errorf("error converting instance: %w", err) - } - } - return ret, nil -} diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go deleted file mode 100644 index 5877ad5c..00000000 --- a/database/sql/scalesets.go +++ /dev/null @@ -1,458 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package sql - -import ( - "context" - "errors" - "fmt" - - "github.com/google/uuid" - "gorm.io/datatypes" - "gorm.io/gorm" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" -) - -func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, error) { - var scaleSets []ScaleSet - - q := s.conn.Model(&ScaleSet{}). - Preload("Organization"). - Preload("Organization.Endpoint"). - Preload("Repository"). - Preload("Repository.Endpoint"). - Preload("Enterprise"). - Preload("Enterprise.Endpoint"). - Omit("extra_specs"). - Omit("status_messages"). - Find(&scaleSets) - if q.Error != nil { - return nil, fmt.Errorf("error fetching all scale sets: %w", q.Error) - } - - ret := make([]params.ScaleSet, len(scaleSets)) - var err error - for idx, val := range scaleSets { - ret[idx], err = s.sqlToCommonScaleSet(val) - if err != nil { - return nil, fmt.Errorf("error converting scale sets: %w", err) - } - } - return ret, nil -} - -func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) { - if err := param.Validate(); err != nil { - return params.ScaleSet{}, fmt.Errorf("failed to validate create params: %w", err) - } - - defer func() { - if err == nil { - s.sendNotify(common.ScaleSetEntityType, common.CreateOperation, scaleSet) - } - }() - - newScaleSet := ScaleSet{ - Name: param.Name, - ScaleSetID: param.ScaleSetID, - DisableUpdate: param.DisableUpdate, - ProviderName: param.ProviderName, - RunnerPrefix: param.GetRunnerPrefix(), - MaxRunners: param.MaxRunners, - MinIdleRunners: param.MinIdleRunners, - RunnerBootstrapTimeout: param.RunnerBootstrapTimeout, - Image: param.Image, - Flavor: param.Flavor, - OSType: param.OSType, - OSArch: param.OSArch, - Enabled: param.Enabled, - GitHubRunnerGroup: param.GitHubRunnerGroup, - State: params.ScaleSetPendingCreate, - } - - if len(param.ExtraSpecs) > 0 { - newScaleSet.ExtraSpecs = datatypes.JSON(param.ExtraSpecs) - } - - entityID, err := uuid.Parse(entity.ID) - if err != nil { - return params.ScaleSet{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) - } - - switch entity.EntityType { - case params.ForgeEntityTypeRepository: - newScaleSet.RepoID = &entityID - case params.ForgeEntityTypeOrganization: - newScaleSet.OrgID = &entityID - case params.ForgeEntityTypeEnterprise: - newScaleSet.EnterpriseID = &entityID - } - err = s.conn.Transaction(func(tx *gorm.DB) error { - if err := s.hasGithubEntity(tx, entity.EntityType, entity.ID); err != nil { - return fmt.Errorf("error checking entity existence: %w", err) - } - - q := tx.Create(&newScaleSet) - if q.Error != nil { - return fmt.Errorf("error creating scale set: %w", q.Error) - } - - return nil - }) - if err != nil { - return params.ScaleSet{}, err - } - - dbScaleSet, err := s.getScaleSetByID(s.conn, newScaleSet.ID, "Instances", "Enterprise", "Organization", "Repository") - if err != nil { - return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) - } - - 
return s.sqlToCommonScaleSet(dbScaleSet) -} - -func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]ScaleSet, error) { - if _, err := uuid.Parse(entityID); err != nil { - return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) - } - - if err := s.hasGithubEntity(tx, entityType, entityID); err != nil { - return nil, fmt.Errorf("error checking entity existence: %w", err) - } - - var preloadEntity string - var fieldName string - switch entityType { - case params.ForgeEntityTypeRepository: - fieldName = entityTypeRepoName - preloadEntity = repositoryFieldName - case params.ForgeEntityTypeOrganization: - fieldName = entityTypeOrgName - preloadEntity = organizationFieldName - case params.ForgeEntityTypeEnterprise: - fieldName = entityTypeEnterpriseName - preloadEntity = enterpriseFieldName - default: - return nil, fmt.Errorf("invalid entityType: %v", entityType) - } - - q := tx - q = q.Preload(preloadEntity) - if len(preload) > 0 { - for _, item := range preload { - q = q.Preload(item) - } - } - - var scaleSets []ScaleSet - condition := fmt.Sprintf("%s = ?", fieldName) - err := q.Model(&ScaleSet{}). - Where(condition, entityID). - Omit("extra_specs"). - Omit("status_messages"). - Find(&scaleSets).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return []ScaleSet{}, nil - } - return nil, fmt.Errorf("error fetching scale sets: %w", err) - } - - return scaleSets, nil -} - -func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { - scaleSets, err := s.listEntityScaleSets(s.conn, entity.EntityType, entity.ID) - if err != nil { - return nil, fmt.Errorf("error fetching scale sets: %w", err) - } - - ret := make([]params.ScaleSet, len(scaleSets)) - for idx, set := range scaleSets { - ret[idx], err = s.sqlToCommonScaleSet(set) - if err != nil { - return nil, fmt.Errorf("error converting scale set: %w", err) - } - } - - return ret, nil -} - -func (s *sqlDatabase) UpdateEntityScaleSet(ctx context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { - defer func() { - if err == nil { - s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, updatedScaleSet) - } - }() - err = s.conn.Transaction(func(tx *gorm.DB) error { - scaleSet, err := s.getEntityScaleSet(tx, entity.EntityType, entity.ID, scaleSetID, "Instances") - if err != nil { - return fmt.Errorf("error fetching scale set: %w", err) - } - - old, err := s.sqlToCommonScaleSet(scaleSet) - if err != nil { - return fmt.Errorf("error converting scale set: %w", err) - } - - updatedScaleSet, err = s.updateScaleSet(tx, scaleSet, param) - if err != nil { - return fmt.Errorf("error updating scale set: %w", err) - } - - if callback != nil { - if err := callback(old, updatedScaleSet); err != nil { - return fmt.Errorf("error executing update callback: %w", err) - } - } - return nil - }) - if err != nil { - return params.ScaleSet{}, err - } - - updatedScaleSet, err = s.GetScaleSetByID(ctx, scaleSetID) - if err != nil { - return params.ScaleSet{}, err - } - return updatedScaleSet, nil -} - -func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, scaleSetID uint, preload ...string) (ScaleSet, error) { - if entityID == "" { - return ScaleSet{}, fmt.Errorf("error missing entity id: %w",
runnerErrors.ErrBadRequest) - } - - if scaleSetID == 0 { - return ScaleSet{}, fmt.Errorf("error missing scaleset id: %w", runnerErrors.ErrBadRequest) - } - - var fieldName string - var entityField string - switch entityType { - case params.ForgeEntityTypeRepository: - fieldName = entityTypeRepoName - entityField = "Repository" - case params.ForgeEntityTypeOrganization: - fieldName = entityTypeOrgName - entityField = "Organization" - case params.ForgeEntityTypeEnterprise: - fieldName = entityTypeEnterpriseName - entityField = "Enterprise" - default: - return ScaleSet{}, fmt.Errorf("invalid entityType: %v", entityType) - } - - q := tx - q = q.Preload(entityField) - if len(preload) > 0 { - for _, item := range preload { - q = q.Preload(item) - } - } - - var scaleSet ScaleSet - condition := fmt.Sprintf("id = ? and %s = ?", fieldName) - err := q.Model(&ScaleSet{}). - Where(condition, scaleSetID, entityID). - First(&scaleSet).Error - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - return ScaleSet{}, fmt.Errorf("error finding scale set: %w", runnerErrors.ErrNotFound) - } - return ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) - } - - return scaleSet, nil -} - -func (s *sqlDatabase) updateScaleSet(tx *gorm.DB, scaleSet ScaleSet, param params.UpdateScaleSetParams) (params.ScaleSet, error) { - if param.Enabled != nil && scaleSet.Enabled != *param.Enabled { - scaleSet.Enabled = *param.Enabled - } - - if param.State != nil && *param.State != scaleSet.State { - scaleSet.State = *param.State - } - - if param.ExtendedState != nil && *param.ExtendedState != scaleSet.ExtendedState { - scaleSet.ExtendedState = *param.ExtendedState - } - - if param.ScaleSetID != 0 { - scaleSet.ScaleSetID = param.ScaleSetID - } - - if param.Name != "" { - scaleSet.Name = param.Name - } - - if param.GitHubRunnerGroup != nil && *param.GitHubRunnerGroup != "" { - scaleSet.GitHubRunnerGroup = *param.GitHubRunnerGroup - } - - if param.Flavor != "" { - scaleSet.Flavor = param.Flavor - } - - if param.Image != "" { - scaleSet.Image = param.Image - } - - if param.Prefix != "" { - scaleSet.RunnerPrefix = param.Prefix - } - - if param.MaxRunners != nil { - scaleSet.MaxRunners = *param.MaxRunners - } - - if param.MinIdleRunners != nil { - scaleSet.MinIdleRunners = *param.MinIdleRunners - } - - if param.OSArch != "" { - scaleSet.OSArch = param.OSArch - } - - if param.OSType != "" { - scaleSet.OSType = param.OSType - } - - if param.ExtraSpecs != nil { - scaleSet.ExtraSpecs = datatypes.JSON(param.ExtraSpecs) - } - - if param.RunnerBootstrapTimeout != nil && *param.RunnerBootstrapTimeout > 0 { - scaleSet.RunnerBootstrapTimeout = *param.RunnerBootstrapTimeout - } - - if param.GitHubRunnerGroup != nil { - scaleSet.GitHubRunnerGroup = *param.GitHubRunnerGroup - } - - if q := tx.Save(&scaleSet); q.Error != nil { - return params.ScaleSet{}, fmt.Errorf("error saving database entry: %w", q.Error) - } - - return s.sqlToCommonScaleSet(scaleSet) -} - -func (s *sqlDatabase) GetScaleSetByID(_ context.Context, scaleSet uint) (params.ScaleSet, error) { - set, err := s.getScaleSetByID( - s.conn, - scaleSet, - "Instances", - "Enterprise", - "Enterprise.Endpoint", - "Organization", - "Organization.Endpoint", - "Repository", - "Repository.Endpoint", - ) - if err != nil { - return params.ScaleSet{}, fmt.Errorf("error fetching scale set by ID: %w", err) - } - return s.sqlToCommonScaleSet(set) -} - -func (s *sqlDatabase) DeleteScaleSetByID(_ context.Context, scaleSetID uint) (err error) { - var scaleSet params.ScaleSet - defer func() 
{ - if err == nil && scaleSet.ID != 0 { - s.sendNotify(common.ScaleSetEntityType, common.DeleteOperation, scaleSet) - } - }() - err = s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") - if err != nil { - return fmt.Errorf("error fetching scale set: %w", err) - } - - if len(dbSet.Instances) > 0 { - return runnerErrors.NewBadRequestError("cannot delete scaleset with runners") - } - scaleSet, err = s.sqlToCommonScaleSet(dbSet) - if err != nil { - return fmt.Errorf("error converting scale set: %w", err) - } - - if q := tx.Unscoped().Delete(&dbSet); q.Error != nil { - return fmt.Errorf("error deleting scale set: %w", q.Error) - } - return nil - }) - if err != nil { - return fmt.Errorf("error removing scale set: %w", err) - } - return nil -} - -func (s *sqlDatabase) SetScaleSetLastMessageID(_ context.Context, scaleSetID uint, lastMessageID int64) (err error) { - var scaleSet params.ScaleSet - defer func() { - if err == nil && scaleSet.ID != 0 { - s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, scaleSet) - } - }() - if err := s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") - if err != nil { - return fmt.Errorf("error fetching scale set: %w", err) - } - dbSet.LastMessageID = lastMessageID - if err := tx.Save(&dbSet).Error; err != nil { - return fmt.Errorf("error saving database entry: %w", err) - } - scaleSet, err = s.sqlToCommonScaleSet(dbSet) - if err != nil { - return fmt.Errorf("error converting scale set: %w", err) - } - return nil - }); err != nil { - return fmt.Errorf("error setting last message ID: %w", err) - } - return nil -} - -func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(_ context.Context, scaleSetID uint, desiredRunnerCount int) (err error) { - var scaleSet params.ScaleSet - defer func() { - if err == nil && scaleSet.ID != 0 { - s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, scaleSet) - } - }() - if err := s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") - if err != nil { - return fmt.Errorf("error fetching scale set: %w", err) - } - dbSet.DesiredRunnerCount = desiredRunnerCount - if err := tx.Save(&dbSet).Error; err != nil { - return fmt.Errorf("error saving database entry: %w", err) - } - scaleSet, err = s.sqlToCommonScaleSet(dbSet) - if err != nil { - return fmt.Errorf("error converting scale set: %w", err) - } - return nil - }); err != nil { - return fmt.Errorf("error setting desired runner count: %w", err) - } - return nil -} diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go deleted file mode 100644 index f1f9fbba..00000000 --- a/database/sql/scalesets_test.go +++ /dev/null @@ -1,368 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
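One idiom recurs throughout the scale set store removed in this diff: every mutator declares a named error return and defers a watcher notification guarded by "if err == nil", so consumers only hear about mutations that actually committed. A distilled sketch of that named-return/defer interplay; the types and operation names below are illustrative stand-ins, not GARM's real ones:

package main

import "fmt"

type store struct{}

// sendNotify stands in for s.sendNotify(common.ScaleSetEntityType, op, payload).
func (s *store) sendNotify(op string, payload any) {
	fmt.Printf("notify %s: %v\n", op, payload)
}

// Update mirrors the deleted mutators: because err is a named return value,
// the deferred closure observes its final value and only notifies on success.
func (s *store) Update(fail bool) (scaleSet string, err error) {
	defer func() {
		if err == nil {
			s.sendNotify("update", scaleSet)
		}
	}()

	if fail {
		return "", fmt.Errorf("saving database entry: boom")
	}
	return "scale-set-1", nil
}

func main() {
	_, _ = (&store{}).Update(false) // prints: notify update: scale-set-1
	_, _ = (&store{}).Update(true)  // prints nothing; the error suppresses the event
}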
- -package sql - -import ( - "context" - "encoding/json" - "fmt" - "testing" - - "github.com/stretchr/testify/suite" - - commonParams "github.com/cloudbase/garm-provider-common/params" - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - garmTesting "github.com/cloudbase/garm/internal/testing" - "github.com/cloudbase/garm/params" -) - -type ScaleSetsTestSuite struct { - suite.Suite - Store dbCommon.Store - adminCtx context.Context - creds params.ForgeCredentials - - org params.Organization - repo params.Repository - enterprise params.Enterprise - - orgEntity params.ForgeEntity - repoEntity params.ForgeEntity - enterpriseEntity params.ForgeEntity -} - -func (s *ScaleSetsTestSuite) SetupTest() { - // create testing sqlite database - ctx := context.Background() - watcher.InitWatcher(ctx) - - db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) - if err != nil { - s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) - } - s.Store = db - - adminCtx := garmTesting.ImpersonateAdminContext(ctx, db, s.T()) - s.adminCtx = adminCtx - - githubEndpoint := garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.creds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) - - // create an organization for testing purposes - s.org, err = s.Store.CreateOrganization(s.adminCtx, "test-org", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) - if err != nil { - s.FailNow(fmt.Sprintf("failed to create org: %s", err)) - } - - s.repo, err = s.Store.CreateRepository(s.adminCtx, "test-org", "test-repo", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) - if err != nil { - s.FailNow(fmt.Sprintf("failed to create repo: %s", err)) - } - - s.enterprise, err = s.Store.CreateEnterprise(s.adminCtx, "test-enterprise", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) - if err != nil { - s.FailNow(fmt.Sprintf("failed to create enterprise: %s", err)) - } - - s.orgEntity, err = s.org.GetEntity() - if err != nil { - s.FailNow(fmt.Sprintf("failed to get org entity: %s", err)) - } - - s.repoEntity, err = s.repo.GetEntity() - if err != nil { - s.FailNow(fmt.Sprintf("failed to get repo entity: %s", err)) - } - - s.enterpriseEntity, err = s.enterprise.GetEntity() - if err != nil { - s.FailNow(fmt.Sprintf("failed to get enterprise entity: %s", err)) - } - - s.T().Cleanup(func() { - err := s.Store.DeleteOrganization(s.adminCtx, s.org.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete org: %s", err)) - } - err = s.Store.DeleteRepository(s.adminCtx, s.repo.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete repo: %s", err)) - } - err = s.Store.DeleteEnterprise(s.adminCtx, s.enterprise.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete enterprise: %s", err)) - } - }) -} - -func (s *ScaleSetsTestSuite) TearDownTest() { - watcher.CloseWatcher() -} - -func (s *ScaleSetsTestSuite) callback(old, newSet params.ScaleSet) error { - s.Require().Equal(old.Name, "test-scaleset") - s.Require().Equal(newSet.Name, "test-scaleset-updated") - s.Require().Equal(old.OSType, commonParams.Linux) - s.Require().Equal(newSet.OSType, commonParams.Windows) - s.Require().Equal(old.OSArch, commonParams.Amd64) - s.Require().Equal(newSet.OSArch, commonParams.Arm64) - s.Require().Equal(old.ExtraSpecs, json.RawMessage(`{"test": 1}`)) - s.Require().Equal(newSet.ExtraSpecs, json.RawMessage(`{"test": 111}`)) - 
s.Require().Equal(old.MaxRunners, uint(10)) - s.Require().Equal(newSet.MaxRunners, uint(60)) - s.Require().Equal(old.MinIdleRunners, uint(5)) - s.Require().Equal(newSet.MinIdleRunners, uint(50)) - s.Require().Equal(old.Image, "test-image") - s.Require().Equal(newSet.Image, "new-test-image") - s.Require().Equal(old.Flavor, "test-flavor") - s.Require().Equal(newSet.Flavor, "new-test-flavor") - s.Require().Equal(old.GitHubRunnerGroup, "test-group") - s.Require().Equal(newSet.GitHubRunnerGroup, "new-test-group") - s.Require().Equal(old.RunnerPrefix.Prefix, "garm") - s.Require().Equal(newSet.RunnerPrefix.Prefix, "test-prefix2") - s.Require().Equal(old.Enabled, false) - s.Require().Equal(newSet.Enabled, true) - return nil -} - -func (s *ScaleSetsTestSuite) TestScaleSetOperations() { - // create a scale set for the organization - createScaleSetPrams := params.CreateScaleSetParams{ - Name: "test-scaleset", - ProviderName: "test-provider", - MaxRunners: 10, - MinIdleRunners: 5, - Image: "test-image", - Flavor: "test-flavor", - OSType: commonParams.Linux, - OSArch: commonParams.Amd64, - ExtraSpecs: json.RawMessage(`{"test": 1}`), - GitHubRunnerGroup: "test-group", - } - - var orgScaleSet params.ScaleSet - var repoScaleSet params.ScaleSet - var enterpriseScaleSet params.ScaleSet - var err error - - s.T().Run("create org scaleset", func(_ *testing.T) { - orgScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.orgEntity, createScaleSetPrams) - s.Require().NoError(err) - s.Require().NotNil(orgScaleSet) - s.Require().Equal(orgScaleSet.Name, createScaleSetPrams.Name) - s.T().Cleanup(func() { - err := s.Store.DeleteScaleSetByID(s.adminCtx, orgScaleSet.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) - } - }) - }) - - s.T().Run("create repo scaleset", func(_ *testing.T) { - repoScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.repoEntity, createScaleSetPrams) - s.Require().NoError(err) - s.Require().NotNil(repoScaleSet) - s.Require().Equal(repoScaleSet.Name, createScaleSetPrams.Name) - s.T().Cleanup(func() { - err := s.Store.DeleteScaleSetByID(s.adminCtx, repoScaleSet.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) - } - }) - }) - - s.T().Run("create enterprise scaleset", func(_ *testing.T) { - enterpriseScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.enterpriseEntity, createScaleSetPrams) - s.Require().NoError(err) - s.Require().NotNil(enterpriseScaleSet) - s.Require().Equal(enterpriseScaleSet.Name, createScaleSetPrams.Name) - - s.T().Cleanup(func() { - err := s.Store.DeleteScaleSetByID(s.adminCtx, enterpriseScaleSet.ID) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) - } - }) - }) - - s.T().Run("create list all scalesets", func(_ *testing.T) { - allScaleSets, err := s.Store.ListAllScaleSets(s.adminCtx) - s.Require().NoError(err) - s.Require().NotEmpty(allScaleSets) - s.Require().Len(allScaleSets, 3) - }) - - s.T().Run("list repo scalesets", func(_ *testing.T) { - repoScaleSets, err := s.Store.ListEntityScaleSets(s.adminCtx, s.repoEntity) - s.Require().NoError(err) - s.Require().NotEmpty(repoScaleSets) - s.Require().Len(repoScaleSets, 1) - }) - - s.T().Run("list org scalesets", func(_ *testing.T) { - orgScaleSets, err := s.Store.ListEntityScaleSets(s.adminCtx, s.orgEntity) - s.Require().NoError(err) - s.Require().NotEmpty(orgScaleSets) - s.Require().Len(orgScaleSets, 1) - }) - - s.T().Run("list enterprise scalesets", func(_ *testing.T) { - enterpriseScaleSets, err := 
s.Store.ListEntityScaleSets(s.adminCtx, s.enterpriseEntity) - s.Require().NoError(err) - s.Require().NotEmpty(enterpriseScaleSets) - s.Require().Len(enterpriseScaleSets, 1) - }) - - s.T().Run("get repo scaleset by ID", func(_ *testing.T) { - repoScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, repoScaleSet.ID) - s.Require().NoError(err) - s.Require().NotNil(repoScaleSetByID) - s.Require().Equal(repoScaleSetByID.ID, repoScaleSet.ID) - s.Require().Equal(repoScaleSetByID.Name, repoScaleSet.Name) - }) - - s.T().Run("get org scaleset by ID", func(_ *testing.T) { - orgScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, orgScaleSet.ID) - s.Require().NoError(err) - s.Require().NotNil(orgScaleSetByID) - s.Require().Equal(orgScaleSetByID.ID, orgScaleSet.ID) - s.Require().Equal(orgScaleSetByID.Name, orgScaleSet.Name) - }) - - s.T().Run("get enterprise scaleset by ID", func(_ *testing.T) { - enterpriseScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, enterpriseScaleSet.ID) - s.Require().NoError(err) - s.Require().NotNil(enterpriseScaleSetByID) - s.Require().Equal(enterpriseScaleSetByID.ID, enterpriseScaleSet.ID) - s.Require().Equal(enterpriseScaleSetByID.Name, enterpriseScaleSet.Name) - }) - - s.T().Run("get scaleset by ID not found", func(_ *testing.T) { - _, err = s.Store.GetScaleSetByID(s.adminCtx, 999) - s.Require().Error(err) - s.Require().Contains(err.Error(), "not found") - }) - - s.T().Run("Set scale set last message ID and desired count", func(_ *testing.T) { - err = s.Store.SetScaleSetLastMessageID(s.adminCtx, orgScaleSet.ID, 20) - s.Require().NoError(err) - err = s.Store.SetScaleSetDesiredRunnerCount(s.adminCtx, orgScaleSet.ID, 5) - s.Require().NoError(err) - orgScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, orgScaleSet.ID) - s.Require().NoError(err) - s.Require().NotNil(orgScaleSetByID) - s.Require().Equal(orgScaleSetByID.LastMessageID, int64(20)) - s.Require().Equal(orgScaleSetByID.DesiredRunnerCount, 5) - }) - - updateParams := params.UpdateScaleSetParams{ - Name: "test-scaleset-updated", - RunnerPrefix: params.RunnerPrefix{ - Prefix: "test-prefix2", - }, - OSType: commonParams.Windows, - OSArch: commonParams.Arm64, - ExtraSpecs: json.RawMessage(`{"test": 111}`), - Enabled: garmTesting.Ptr(true), - MaxRunners: garmTesting.Ptr(uint(60)), - MinIdleRunners: garmTesting.Ptr(uint(50)), - Image: "new-test-image", - Flavor: "new-test-flavor", - GitHubRunnerGroup: garmTesting.Ptr("new-test-group"), - } - - s.T().Run("update repo scaleset", func(_ *testing.T) { - newRepoScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.repoEntity, repoScaleSet.ID, updateParams, s.callback) - s.Require().NoError(err) - s.Require().NotNil(newRepoScaleSet) - s.Require().NoError(s.callback(repoScaleSet, newRepoScaleSet)) - }) - - s.T().Run("update org scaleset", func(_ *testing.T) { - newOrgScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.orgEntity, orgScaleSet.ID, updateParams, s.callback) - s.Require().NoError(err) - s.Require().NotNil(newOrgScaleSet) - s.Require().NoError(s.callback(orgScaleSet, newOrgScaleSet)) - }) - - s.T().Run("update enterprise scaleset", func(_ *testing.T) { - newEnterpriseScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.enterpriseEntity, enterpriseScaleSet.ID, updateParams, s.callback) - s.Require().NoError(err) - s.Require().NotNil(newEnterpriseScaleSet) - s.Require().NoError(s.callback(enterpriseScaleSet, newEnterpriseScaleSet)) - }) - - s.T().Run("update scaleset not found", func(_ *testing.T) { - _, err = 
s.Store.UpdateEntityScaleSet(s.adminCtx, s.enterpriseEntity, 99999, updateParams, s.callback) - s.Require().Error(err) - s.Require().Contains(err.Error(), "not found") - }) - - s.T().Run("update scaleset with invalid entity", func(_ *testing.T) { - _, err = s.Store.UpdateEntityScaleSet(s.adminCtx, params.ForgeEntity{}, enterpriseScaleSet.ID, params.UpdateScaleSetParams{}, nil) - s.Require().Error(err) - s.Require().Contains(err.Error(), "missing entity id") - }) - - s.T().Run("Create repo scale set instance", func(_ *testing.T) { - param := params.CreateInstanceParams{ - Name: "test-instance", - Status: commonParams.InstancePendingCreate, - RunnerStatus: params.RunnerPending, - OSType: commonParams.Linux, - OSArch: commonParams.Amd64, - CallbackURL: "http://localhost:8080/callback", - MetadataURL: "http://localhost:8080/metadata", - GitHubRunnerGroup: "test-group", - JitConfiguration: map[string]string{ - "test": "test", - }, - AgentID: 5, - } - - instance, err := s.Store.CreateScaleSetInstance(s.adminCtx, repoScaleSet.ID, param) - s.Require().NoError(err) - s.Require().NotNil(instance) - s.Require().Equal(instance.Name, param.Name) - s.Require().Equal(instance.Status, param.Status) - s.Require().Equal(instance.RunnerStatus, param.RunnerStatus) - s.Require().Equal(instance.OSType, param.OSType) - s.Require().Equal(instance.OSArch, param.OSArch) - s.Require().Equal(instance.CallbackURL, param.CallbackURL) - s.Require().Equal(instance.MetadataURL, param.MetadataURL) - s.Require().Equal(instance.GitHubRunnerGroup, param.GitHubRunnerGroup) - s.Require().Equal(instance.JitConfiguration, param.JitConfiguration) - s.Require().Equal(instance.AgentID, param.AgentID) - - s.T().Cleanup(func() { - err := s.Store.DeleteInstanceByName(s.adminCtx, instance.Name) - if err != nil { - s.FailNow(fmt.Sprintf("failed to delete scaleset instance: %s", err)) - } - }) - }) - - s.T().Run("List repo scale set instances", func(_ *testing.T) { - instances, err := s.Store.ListScaleSetInstances(s.adminCtx, repoScaleSet.ID) - s.Require().NoError(err) - s.Require().NotEmpty(instances) - s.Require().Len(instances, 1) - }) -} - -func TestScaleSetsTestSuite(t *testing.T) { - suite.Run(t, new(ScaleSetsTestSuite)) -} diff --git a/database/sql/sql.go b/database/sql/sql.go index 7d1fc96c..290cce3f 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -16,12 +16,13 @@ package sql import ( "context" - "errors" "fmt" "log/slog" "net/url" "strings" + "sync" + "github.com/pkg/errors" "gorm.io/driver/mysql" "gorm.io/driver/sqlite" "gorm.io/gorm" @@ -36,17 +37,11 @@ import ( "github.com/cloudbase/garm/util/appdefaults" ) -const ( - repositoryFieldName string = "Repository" - organizationFieldName string = "Organization" - enterpriseFieldName string = "Enterprise" -) - // newDBConn returns a new gorm db connection, given the config func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { dbType, connURI, err := dbCfg.GormParams() if err != nil { - return nil, fmt.Errorf("error getting DB URI string: %w", err) + return nil, errors.Wrap(err, "getting DB URI string") } gormConfig := &gorm.Config{} @@ -61,7 +56,7 @@ func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { conn, err = gorm.Open(sqlite.Open(connURI), gormConfig) } if err != nil { - return nil, fmt.Errorf("error connecting to database: %w", err) + return nil, errors.Wrap(err, "connecting to database") } if dbCfg.Debug { @@ -73,11 +68,11 @@ func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { func NewSQLDatabase(ctx context.Context, 
cfg config.Database) (common.Store, error) { conn, err := newDBConn(cfg) if err != nil { - return nil, fmt.Errorf("error creating DB connection: %w", err) + return nil, errors.Wrap(err, "creating DB connection") } producer, err := watcher.RegisterProducer(ctx, "sql") if err != nil { - return nil, fmt.Errorf("error registering producer: %w", err) + return nil, errors.Wrap(err, "registering producer") } db := &sqlDatabase{ conn: conn, @@ -87,7 +82,7 @@ func NewSQLDatabase(ctx context.Context, cfg config.Database) (common.Store, err } if err := db.migrateDB(); err != nil { - return nil, fmt.Errorf("error migrating database: %w", err) + return nil, errors.Wrap(err, "migrating database") } return db, nil } @@ -97,6 +92,11 @@ type sqlDatabase struct { ctx context.Context cfg config.Database producer common.Producer + + // while busy_timeout helps, in situations of high contention, we can still + // end up with multiple threads trying to write to the database. SQLite does not + // support row level locking, so we serialize writes ourselves. + writeMux sync.Mutex } var renameTemplate = ` @@ -221,14 +221,14 @@ func (s *sqlDatabase) ensureGithubEndpoint() error { var epCount int64 if err := s.conn.Model(&GithubEndpoint{}).Count(&epCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error counting github endpoints: %w", err) + return errors.Wrap(err, "counting github endpoints") } } if epCount == 0 { if _, err := s.CreateGithubEndpoint(context.Background(), createEndpointParams); err != nil { if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - return fmt.Errorf("error creating default github endpoint: %w", err) + return errors.Wrap(err, "creating default github endpoint") } } } @@ -246,7 +246,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { // Admin user doesn't exist. This is a new deploy. Nothing to migrate. return nil } - return fmt.Errorf("error getting admin user: %w", err) + return errors.Wrap(err, "getting admin user") } // Impersonate the admin user. 
We're migrating from config credentials to @@ -259,7 +259,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("migrating credentials to DB") slog.Info("creating github endpoints table") if err := s.conn.AutoMigrate(&GithubEndpoint{}); err != nil { - return fmt.Errorf("error migrating github endpoints: %w", err) + return errors.Wrap(err, "migrating github endpoints") } defer func() { @@ -271,7 +271,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("creating github credentials table") if err := s.conn.AutoMigrate(&GithubCredentials{}); err != nil { - return fmt.Errorf("error migrating github credentials: %w", err) + return errors.Wrap(err, "migrating github credentials") } defer func() { @@ -291,12 +291,12 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("importing credential", "name", cred.Name) parsed, err := url.Parse(cred.BaseEndpoint()) if err != nil { - return fmt.Errorf("error parsing base URL: %w", err) + return errors.Wrap(err, "parsing base URL") } certBundle, err := cred.CACertBundle() if err != nil { - return fmt.Errorf("error getting CA cert bundle: %w", err) + return errors.Wrap(err, "getting CA cert bundle") } hostname := parsed.Hostname() createParams := params.CreateGithubEndpointParams{ @@ -308,15 +308,15 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { CACertBundle: certBundle, } - var endpoint params.ForgeEndpoint + var endpoint params.GithubEndpoint endpoint, err = s.GetGithubEndpoint(adminCtx, hostname) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("error getting github endpoint: %w", err) + return errors.Wrap(err, "getting github endpoint") } endpoint, err = s.CreateGithubEndpoint(adminCtx, createParams) if err != nil { - return fmt.Errorf("error creating default github endpoint: %w", err) + return errors.Wrap(err, "creating default github endpoint") } } @@ -324,13 +324,13 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { Name: cred.Name, Description: cred.Description, Endpoint: endpoint.Name, - AuthType: params.ForgeAuthType(cred.GetAuthType()), + AuthType: params.GithubAuthType(cred.GetAuthType()), } switch credParams.AuthType { - case params.ForgeAuthTypeApp: + case params.GithubAuthTypeApp: keyBytes, err := cred.App.PrivateKeyBytes() if err != nil { - return fmt.Errorf("error getting private key bytes: %w", err) + return errors.Wrap(err, "getting private key bytes") } credParams.App = params.GithubApp{ AppID: cred.App.AppID, @@ -339,9 +339,9 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { } if err := credParams.App.Validate(); err != nil { - return fmt.Errorf("error validating app credentials: %w", err) + return errors.Wrap(err, "validating app credentials") } - case params.ForgeAuthTypePAT: + case params.GithubAuthTypePAT: token := cred.PAT.OAuth2Token if token == "" { token = cred.OAuth2Token @@ -356,35 +356,19 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { creds, err := s.CreateGithubCredentials(adminCtx, credParams) if err != nil { - return fmt.Errorf("error creating github credentials: %w", err) + return errors.Wrap(err, "creating github credentials") } if err := s.conn.Exec("update repositories set credentials_id = ?,endpoint_name = ? 
where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return fmt.Errorf("error updating repositories: %w", err) + return errors.Wrap(err, "updating repositories") } if err := s.conn.Exec("update organizations set credentials_id = ?,endpoint_name = ? where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return fmt.Errorf("error updating organizations: %w", err) + return errors.Wrap(err, "updating organizations") } if err := s.conn.Exec("update enterprises set credentials_id = ?,endpoint_name = ? where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return fmt.Errorf("error updating enterprises: %w", err) - } - } - return nil -} - -func (s *sqlDatabase) migrateWorkflow() error { - if s.conn.Migrator().HasTable(&WorkflowJob{}) { - if s.conn.Migrator().HasColumn(&WorkflowJob{}, "runner_name") { - // Remove jobs that are not in "queued" status. We really only care about queued jobs. Once they transition - // to something else, we don't really consume them anyway. - if err := s.conn.Exec("delete from workflow_jobs where status is not 'queued'").Error; err != nil { - return fmt.Errorf("error updating workflow_jobs: %w", err) - } - if err := s.conn.Migrator().DropColumn(&WorkflowJob{}, "runner_name"); err != nil { - return fmt.Errorf("error updating workflow_jobs: %w", err) - } + return errors.Wrap(err, "updating enterprises") } } return nil @@ -404,34 +388,32 @@ func (s *sqlDatabase) migrateDB() error { } if err := s.cascadeMigration(); err != nil { - return fmt.Errorf("error running cascade migration: %w", err) + return errors.Wrap(err, "running cascade migration") } if s.conn.Migrator().HasTable(&Pool{}) { if err := s.conn.Exec("update pools set repo_id=NULL where repo_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return fmt.Errorf("error updating pools %w", err) + return errors.Wrap(err, "updating pools") } if err := s.conn.Exec("update pools set org_id=NULL where org_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return fmt.Errorf("error updating pools: %w", err) + return errors.Wrap(err, "updating pools") } if err := s.conn.Exec("update pools set enterprise_id=NULL where enterprise_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return fmt.Errorf("error updating pools: %w", err) + return errors.Wrap(err, "updating pools") } } - if err := s.migrateWorkflow(); err != nil { - return fmt.Errorf("error migrating workflows: %w", err) - } - - if s.conn.Migrator().HasTable(&GithubEndpoint{}) { - if !s.conn.Migrator().HasColumn(&GithubEndpoint{}, "endpoint_type") { - if err := s.conn.Migrator().AutoMigrate(&GithubEndpoint{}); err != nil { - return fmt.Errorf("error migrating github endpoints: %w", err) + if s.conn.Migrator().HasTable(&WorkflowJob{}) { + if s.conn.Migrator().HasColumn(&WorkflowJob{}, "runner_name") { + // Remove jobs that are not in "queued" status. We really only care about queued jobs. Once they transition + // to something else, we don't really consume them anyway. 
+ if err := s.conn.Exec("delete from workflow_jobs where status is not 'queued'").Error; err != nil { + return errors.Wrap(err, "updating workflow_jobs") } - if err := s.conn.Exec("update github_endpoints set endpoint_type = 'github' where endpoint_type is null").Error; err != nil { - return fmt.Errorf("error updating github endpoints: %w", err) + if err := s.conn.Migrator().DropColumn(&WorkflowJob{}, "runner_name"); err != nil { + return errors.Wrap(err, "updating workflow_jobs") } } } @@ -451,23 +433,18 @@ func (s *sqlDatabase) migrateDB() error { &User{}, &GithubEndpoint{}, &GithubCredentials{}, - &GiteaCredentials{}, &Tag{}, &Pool{}, &Repository{}, &Organization{}, &Enterprise{}, - &EnterpriseEvent{}, - &OrganizationEvent{}, - &RepositoryEvent{}, &Address{}, &InstanceStatusUpdate{}, &Instance{}, &ControllerInfo{}, &WorkflowJob{}, - &ScaleSet{}, ); err != nil { - return fmt.Errorf("error running auto migrate: %w", err) + return errors.Wrap(err, "running auto migrate") } s.conn.Exec("PRAGMA foreign_keys = ON") @@ -475,23 +452,23 @@ func (s *sqlDatabase) migrateDB() error { var controller ControllerInfo if err := s.conn.First(&controller).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error updating controller info: %w", err) + return errors.Wrap(err, "updating controller info") } } else { controller.MinimumJobAgeBackoff = 30 if err := s.conn.Save(&controller).Error; err != nil { - return fmt.Errorf("error updating controller info: %w", err) + return errors.Wrap(err, "updating controller info") } } } if err := s.ensureGithubEndpoint(); err != nil { - return fmt.Errorf("error ensuring github endpoint: %w", err) + return errors.Wrap(err, "ensuring github endpoint") } if needsCredentialMigration { if err := s.migrateCredentialsToDB(); err != nil { - return fmt.Errorf("error migrating credentials: %w", err) + return errors.Wrap(err, "migrating credentials") } } return nil diff --git a/database/sql/users.go b/database/sql/users.go index ca78c5e8..6bc0973f 100644 --- a/database/sql/users.go +++ b/database/sql/users.go @@ -16,9 +16,9 @@ package sql import ( "context" - "errors" "fmt" + "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -39,7 +39,7 @@ func (s *sqlDatabase) getUserByUsernameOrEmail(tx *gorm.DB, user string) (User, if errors.Is(q.Error, gorm.ErrRecordNotFound) { return User{}, runnerErrors.ErrNotFound } - return User{}, fmt.Errorf("error fetching user: %w", q.Error) + return User{}, errors.Wrap(q.Error, "fetching user") } return dbUser, nil } @@ -51,12 +51,15 @@ func (s *sqlDatabase) getUserByID(tx *gorm.DB, userID string) (User, error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return User{}, runnerErrors.ErrNotFound } - return User{}, fmt.Errorf("error fetching user: %w", q.Error) + return User{}, errors.Wrap(q.Error, "fetching user") } return dbUser, nil } func (s *sqlDatabase) CreateUser(_ context.Context, user params.NewUserParams) (params.User, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if user.Username == "" || user.Email == "" || user.Password == "" { return params.User{}, runnerErrors.NewBadRequestError("missing username, password or email") } @@ -82,12 +85,12 @@ func (s *sqlDatabase) CreateUser(_ context.Context, user params.NewUserParams) ( q := tx.Save(&newUser) if q.Error != nil { - return fmt.Errorf("error creating user: %w", q.Error) + return errors.Wrap(q.Error, "creating user") } return nil }) if err != nil { - return params.User{}, fmt.Errorf("error 
creating user: %w", err) + return params.User{}, errors.Wrap(err, "creating user") } return s.sqlToParamsUser(newUser), nil } @@ -105,7 +108,7 @@ func (s *sqlDatabase) HasAdminUser(_ context.Context) bool { func (s *sqlDatabase) GetUser(_ context.Context, user string) (params.User, error) { dbUser, err := s.getUserByUsernameOrEmail(s.conn, user) if err != nil { - return params.User{}, fmt.Errorf("error fetching user: %w", err) + return params.User{}, errors.Wrap(err, "fetching user") } return s.sqlToParamsUser(dbUser), nil } @@ -113,18 +116,21 @@ func (s *sqlDatabase) GetUser(_ context.Context, user string) (params.User, erro func (s *sqlDatabase) GetUserByID(_ context.Context, userID string) (params.User, error) { dbUser, err := s.getUserByID(s.conn, userID) if err != nil { - return params.User{}, fmt.Errorf("error fetching user: %w", err) + return params.User{}, errors.Wrap(err, "fetching user") } return s.sqlToParamsUser(dbUser), nil } func (s *sqlDatabase) UpdateUser(_ context.Context, user string, param params.UpdateUserParams) (params.User, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var err error var dbUser User err = s.conn.Transaction(func(tx *gorm.DB) error { dbUser, err = s.getUserByUsernameOrEmail(tx, user) if err != nil { - return fmt.Errorf("error fetching user: %w", err) + return errors.Wrap(err, "fetching user") } if param.FullName != "" { @@ -141,12 +147,12 @@ func (s *sqlDatabase) UpdateUser(_ context.Context, user string, param params.Up } if q := tx.Save(&dbUser); q.Error != nil { - return fmt.Errorf("error saving user: %w", q.Error) + return errors.Wrap(q.Error, "saving user") } return nil }) if err != nil { - return params.User{}, fmt.Errorf("error updating user: %w", err) + return params.User{}, errors.Wrap(err, "updating user") } return s.sqlToParamsUser(dbUser), nil } @@ -159,7 +165,7 @@ func (s *sqlDatabase) GetAdminUser(_ context.Context) (params.User, error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return params.User{}, runnerErrors.ErrNotFound } - return params.User{}, fmt.Errorf("error fetching admin user: %w", q.Error) + return params.User{}, errors.Wrap(q.Error, "fetching admin user") } return s.sqlToParamsUser(user), nil } diff --git a/database/sql/users_test.go b/database/sql/users_test.go index 369abff3..627c4b93 100644 --- a/database/sql/users_test.go +++ b/database/sql/users_test.go @@ -28,7 +28,6 @@ import ( "gorm.io/gorm/logger" dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" "github.com/cloudbase/garm/params" ) @@ -54,13 +53,7 @@ func (s *UserTestSuite) assertSQLMockExpectations() { } } -func (s *UserTestSuite) TearDownTest() { - watcher.CloseWatcher() -} - func (s *UserTestSuite) SetupTest() { - ctx := context.Background() - watcher.InitWatcher(ctx) // create testing sqlite database db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) if err != nil { @@ -161,7 +154,7 @@ func (s *UserTestSuite) TestCreateUserUsernameAlreadyExist() { _, err := s.Store.CreateUser(context.Background(), s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal(("error creating user: username already exists"), err.Error()) + s.Require().Equal(("creating user: username already exists"), err.Error()) } func (s *UserTestSuite) TestCreateUserEmailAlreadyExist() { @@ -170,7 +163,7 @@ func (s *UserTestSuite) TestCreateUserEmailAlreadyExist() { _, err := s.Store.CreateUser(context.Background(), 
s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal(("error creating user: email already exists"), err.Error()) + s.Require().Equal(("creating user: email already exists"), err.Error()) } func (s *UserTestSuite) TestCreateUserDBCreateErr() { @@ -191,7 +184,7 @@ func (s *UserTestSuite) TestCreateUserDBCreateErr() { _, err := s.StoreSQLMocked.CreateUser(context.Background(), s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal("error creating user: error creating user: creating user mock error", err.Error()) + s.Require().Equal("creating user: creating user: creating user mock error", err.Error()) s.assertSQLMockExpectations() } @@ -230,7 +223,7 @@ func (s *UserTestSuite) TestGetUserNotFound() { _, err := s.Store.GetUser(context.Background(), "dummy-user") s.Require().NotNil(err) - s.Require().Equal("error fetching user: not found", err.Error()) + s.Require().Equal("fetching user: not found", err.Error()) } func (s *UserTestSuite) TestGetUserByID() { @@ -244,7 +237,7 @@ func (s *UserTestSuite) TestGetUserByIDNotFound() { _, err := s.Store.GetUserByID(context.Background(), "dummy-user-id") s.Require().NotNil(err) - s.Require().Equal("error fetching user: not found", err.Error()) + s.Require().Equal("fetching user: not found", err.Error()) } func (s *UserTestSuite) TestUpdateUser() { @@ -260,7 +253,7 @@ func (s *UserTestSuite) TestUpdateUserNotFound() { _, err := s.Store.UpdateUser(context.Background(), "dummy-user", s.Fixtures.UpdateUserParams) s.Require().NotNil(err) - s.Require().Equal("error updating user: error fetching user: not found", err.Error()) + s.Require().Equal("updating user: fetching user: not found", err.Error()) } func (s *UserTestSuite) TestUpdateUserDBSaveErr() { @@ -278,7 +271,7 @@ func (s *UserTestSuite) TestUpdateUserDBSaveErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("error updating user: error saving user: saving user mock error", err.Error()) + s.Require().Equal("updating user: saving user: saving user mock error", err.Error()) } func TestUserTestSuite(t *testing.T) { diff --git a/database/sql/util.go b/database/sql/util.go index 9509aacf..cc2bbcb9 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -15,19 +15,17 @@ package sql import ( - "context" "encoding/json" - "errors" "fmt" "github.com/google/uuid" + "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm-provider-common/util" - "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) @@ -41,14 +39,14 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e var labels []string if len(instance.AditionalLabels) > 0 { if err := json.Unmarshal(instance.AditionalLabels, &labels); err != nil { - return params.Instance{}, fmt.Errorf("error unmarshalling labels: %w", err) + return params.Instance{}, errors.Wrap(err, "unmarshalling labels") } } var jitConfig map[string]string if len(instance.JitConfiguration) > 0 { if err := s.unsealAndUnmarshal(instance.JitConfiguration, &jitConfig); err != nil { - return params.Instance{}, fmt.Errorf("error unmarshalling jit configuration: %w", err) + return params.Instance{}, errors.Wrap(err, "unmarshalling jit configuration") } } ret := params.Instance{ @@ -62,6 +60,7 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e 
OSArch: instance.OSArch, Status: instance.Status, RunnerStatus: instance.RunnerStatus, + PoolID: instance.PoolID.String(), CallbackURL: instance.CallbackURL, MetadataURL: instance.MetadataURL, StatusMessages: []params.StatusMessage{}, @@ -74,28 +73,10 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e AditionalLabels: labels, } - if instance.ScaleSetFkID != nil { - ret.ScaleSetID = *instance.ScaleSetFkID - ret.ProviderName = instance.ScaleSet.ProviderName - } - - if instance.PoolID != nil { - ret.PoolID = instance.PoolID.String() - ret.ProviderName = instance.Pool.ProviderName - } - - if ret.ScaleSetID == 0 && ret.PoolID == "" { - return params.Instance{}, errors.New("missing pool or scale set id") - } - - if ret.ScaleSetID != 0 && ret.PoolID != "" { - return params.Instance{}, errors.New("both pool and scale set ids are set") - } - if instance.Job != nil { paramJob, err := sqlWorkflowJobToParamsJob(*instance.Job) if err != nil { - return params.Instance{}, fmt.Errorf("error converting job: %w", err) + return params.Instance{}, errors.Wrap(err, "converting job") } ret.Job = ¶mJob } @@ -132,12 +113,12 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( } secret, err := util.Unseal(org.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Organization{}, fmt.Errorf("error decrypting secret: %w", err) + return params.Organization{}, errors.Wrap(err, "decrypting secret") } endpoint, err := s.sqlToCommonGithubEndpoint(org.Endpoint) if err != nil { - return params.Organization{}, fmt.Errorf("error converting endpoint: %w", err) + return params.Organization{}, errors.Wrap(err, "converting endpoint") } ret := params.Organization{ ID: org.ID.String(), @@ -151,37 +132,16 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( UpdatedAt: org.UpdatedAt, } - var forgeCreds params.ForgeCredentials if org.CredentialsID != nil { ret.CredentialsID = *org.CredentialsID - forgeCreds, err = s.sqlToCommonForgeCredentials(org.Credentials) - } - - if org.GiteaCredentialsID != nil { - ret.CredentialsID = *org.GiteaCredentialsID - forgeCreds, err = s.sqlGiteaToCommonForgeCredentials(org.GiteaCredentials) - } - - if err != nil { - return params.Organization{}, fmt.Errorf("error converting credentials: %w", err) - } - - if len(org.Events) > 0 { - ret.Events = make([]params.EntityEvent, len(org.Events)) - for idx, event := range org.Events { - ret.Events[idx] = params.EntityEvent{ - ID: event.ID, - Message: event.Message, - EventType: event.EventType, - EventLevel: event.EventLevel, - CreatedAt: event.CreatedAt, - } - } } if detailed { - ret.Credentials = forgeCreds - ret.CredentialsName = forgeCreds.Name + creds, err := s.sqlToCommonGithubCredentials(org.Credentials) + if err != nil { + return params.Organization{}, errors.Wrap(err, "converting credentials") + } + ret.Credentials = creds } if ret.PoolBalancerType == "" { @@ -191,7 +151,7 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( for idx, pool := range org.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - return params.Organization{}, fmt.Errorf("error converting pool: %w", err) + return params.Organization{}, errors.Wrap(err, "converting pool") } } @@ -204,12 +164,12 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool } secret, err := util.Unseal(enterprise.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Enterprise{}, fmt.Errorf("error 
decrypting secret: %w", err) + return params.Enterprise{}, errors.Wrap(err, "decrypting secret") } endpoint, err := s.sqlToCommonGithubEndpoint(enterprise.Endpoint) if err != nil { - return params.Enterprise{}, fmt.Errorf("error converting endpoint: %w", err) + return params.Enterprise{}, errors.Wrap(err, "converting endpoint") } ret := params.Enterprise{ ID: enterprise.ID.String(), @@ -227,23 +187,10 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool ret.CredentialsID = *enterprise.CredentialsID } - if len(enterprise.Events) > 0 { - ret.Events = make([]params.EntityEvent, len(enterprise.Events)) - for idx, event := range enterprise.Events { - ret.Events[idx] = params.EntityEvent{ - ID: event.ID, - Message: event.Message, - EventType: event.EventType, - EventLevel: event.EventLevel, - CreatedAt: event.CreatedAt, - } - } - } - if detailed { - creds, err := s.sqlToCommonForgeCredentials(enterprise.Credentials) + creds, err := s.sqlToCommonGithubCredentials(enterprise.Credentials) if err != nil { - return params.Enterprise{}, fmt.Errorf("error converting credentials: %w", err) + return params.Enterprise{}, errors.Wrap(err, "converting credentials") } ret.Credentials = creds } @@ -255,7 +202,7 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool for idx, pool := range enterprise.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - return params.Enterprise{}, fmt.Errorf("error converting pool: %w", err) + return params.Enterprise{}, errors.Wrap(err, "converting pool") } } @@ -286,108 +233,32 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { UpdatedAt: pool.UpdatedAt, } - var ep GithubEndpoint if pool.RepoID != nil { ret.RepoID = pool.RepoID.String() if pool.Repository.Owner != "" && pool.Repository.Name != "" { ret.RepoName = fmt.Sprintf("%s/%s", pool.Repository.Owner, pool.Repository.Name) } - ep = pool.Repository.Endpoint } if pool.OrgID != nil && pool.Organization.Name != "" { ret.OrgID = pool.OrgID.String() ret.OrgName = pool.Organization.Name - ep = pool.Organization.Endpoint } if pool.EnterpriseID != nil && pool.Enterprise.Name != "" { ret.EnterpriseID = pool.EnterpriseID.String() ret.EnterpriseName = pool.Enterprise.Name - ep = pool.Enterprise.Endpoint } - endpoint, err := s.sqlToCommonGithubEndpoint(ep) - if err != nil { - return params.Pool{}, fmt.Errorf("error converting endpoint: %w", err) - } - ret.Endpoint = endpoint - for idx, val := range pool.Tags { ret.Tags[idx] = s.sqlToCommonTags(*val) } + var err error for idx, inst := range pool.Instances { ret.Instances[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return params.Pool{}, fmt.Errorf("error converting instance: %w", err) - } - } - - return ret, nil -} - -func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, error) { - ret := params.ScaleSet{ - ID: scaleSet.ID, - CreatedAt: scaleSet.CreatedAt, - UpdatedAt: scaleSet.UpdatedAt, - ScaleSetID: scaleSet.ScaleSetID, - Name: scaleSet.Name, - DisableUpdate: scaleSet.DisableUpdate, - - ProviderName: scaleSet.ProviderName, - MaxRunners: scaleSet.MaxRunners, - MinIdleRunners: scaleSet.MinIdleRunners, - RunnerPrefix: params.RunnerPrefix{ - Prefix: scaleSet.RunnerPrefix, - }, - Image: scaleSet.Image, - Flavor: scaleSet.Flavor, - OSArch: scaleSet.OSArch, - OSType: scaleSet.OSType, - Enabled: scaleSet.Enabled, - Instances: make([]params.Instance, len(scaleSet.Instances)), - RunnerBootstrapTimeout: scaleSet.RunnerBootstrapTimeout, - ExtraSpecs: 
json.RawMessage(scaleSet.ExtraSpecs), - GitHubRunnerGroup: scaleSet.GitHubRunnerGroup, - State: scaleSet.State, - ExtendedState: scaleSet.ExtendedState, - LastMessageID: scaleSet.LastMessageID, - DesiredRunnerCount: scaleSet.DesiredRunnerCount, - } - - var ep GithubEndpoint - if scaleSet.RepoID != nil { - ret.RepoID = scaleSet.RepoID.String() - if scaleSet.Repository.Owner != "" && scaleSet.Repository.Name != "" { - ret.RepoName = fmt.Sprintf("%s/%s", scaleSet.Repository.Owner, scaleSet.Repository.Name) - } - ep = scaleSet.Repository.Endpoint - } - - if scaleSet.OrgID != nil { - ret.OrgID = scaleSet.OrgID.String() - ret.OrgName = scaleSet.Organization.Name - ep = scaleSet.Organization.Endpoint - } - - if scaleSet.EnterpriseID != nil { - ret.EnterpriseID = scaleSet.EnterpriseID.String() - ret.EnterpriseName = scaleSet.Enterprise.Name - ep = scaleSet.Enterprise.Endpoint - } - - endpoint, err := s.sqlToCommonGithubEndpoint(ep) - if err != nil { - return params.ScaleSet{}, fmt.Errorf("error converting endpoint: %w", err) - } - ret.Endpoint = endpoint - - for idx, inst := range scaleSet.Instances { - ret.Instances[idx], err = s.sqlToParamsInstance(inst) - if err != nil { - return params.ScaleSet{}, fmt.Errorf("error converting instance: %w", err) + return params.Pool{}, errors.Wrap(err, "converting instance") } } @@ -407,11 +278,11 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par } secret, err := util.Unseal(repo.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Repository{}, fmt.Errorf("error decrypting secret: %w", err) + return params.Repository{}, errors.Wrap(err, "decrypting secret") } endpoint, err := s.sqlToCommonGithubEndpoint(repo.Endpoint) if err != nil { - return params.Repository{}, fmt.Errorf("error converting endpoint: %w", err) + return params.Repository{}, errors.Wrap(err, "converting endpoint") } ret := params.Repository{ ID: repo.ID.String(), @@ -426,41 +297,16 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par Endpoint: endpoint, } - if repo.CredentialsID != nil && repo.GiteaCredentialsID != nil { - return params.Repository{}, runnerErrors.NewConflictError("both gitea and github credentials are set for repo %s", repo.Name) - } - - var forgeCreds params.ForgeCredentials if repo.CredentialsID != nil { ret.CredentialsID = *repo.CredentialsID - forgeCreds, err = s.sqlToCommonForgeCredentials(repo.Credentials) - } - - if repo.GiteaCredentialsID != nil { - ret.CredentialsID = *repo.GiteaCredentialsID - forgeCreds, err = s.sqlGiteaToCommonForgeCredentials(repo.GiteaCredentials) - } - - if err != nil { - return params.Repository{}, fmt.Errorf("error converting credentials: %w", err) - } - - if len(repo.Events) > 0 { - ret.Events = make([]params.EntityEvent, len(repo.Events)) - for idx, event := range repo.Events { - ret.Events[idx] = params.EntityEvent{ - ID: event.ID, - Message: event.Message, - EventType: event.EventType, - EventLevel: event.EventLevel, - CreatedAt: event.CreatedAt, - } - } } if detailed { - ret.Credentials = forgeCreds - ret.CredentialsName = forgeCreds.Name + creds, err := s.sqlToCommonGithubCredentials(repo.Credentials) + if err != nil { + return params.Repository{}, errors.Wrap(err, "converting credentials") + } + ret.Credentials = creds } if ret.PoolBalancerType == "" { @@ -470,7 +316,7 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par for idx, pool := range repo.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - 
return params.Repository{}, fmt.Errorf("error converting pool: %w", err) + return params.Repository{}, errors.Wrap(err, "converting pool") } } @@ -499,14 +345,14 @@ func (s *sqlDatabase) getOrCreateTag(tx *gorm.DB, tagName string) (Tag, error) { return tag, nil } if !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Tag{}, fmt.Errorf("error fetching tag from database: %w", q.Error) + return Tag{}, errors.Wrap(q.Error, "fetching tag from database") } newTag := Tag{ Name: tagName, } if err := tx.Create(&newTag).Error; err != nil { - return Tag{}, fmt.Errorf("error creating tag: %w", err) + return Tag{}, errors.Wrap(err, "creating tag") } return newTag, nil } @@ -561,7 +407,7 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool } if q := tx.Save(&pool); q.Error != nil { - return params.Pool{}, fmt.Errorf("error saving database entry: %w", q.Error) + return params.Pool{}, errors.Wrap(q.Error, "saving database entry") } tags := []Tag{} @@ -569,13 +415,13 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool for _, val := range param.Tags { t, err := s.getOrCreateTag(tx, val) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching tag: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching tag") } tags = append(tags, t) } if err := tx.Model(&pool).Association("Tags").Replace(&tags); err != nil { - return params.Pool{}, fmt.Errorf("error replacing tags: %w", err) + return params.Pool{}, errors.Wrap(err, "replacing tags") } } @@ -585,7 +431,7 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool func (s *sqlDatabase) getPoolByID(tx *gorm.DB, poolID string, preload ...string) (Pool, error) { u, err := uuid.Parse(poolID) if err != nil { - return Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) + return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var pool Pool q := tx.Model(&Pool{}) @@ -601,54 +447,34 @@ func (s *sqlDatabase) getPoolByID(tx *gorm.DB, poolID string, preload ...string) if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Pool{}, runnerErrors.ErrNotFound } - return Pool{}, fmt.Errorf("error fetching org from database: %w", q.Error) + return Pool{}, errors.Wrap(q.Error, "fetching org from database") } return pool, nil } -func (s *sqlDatabase) getScaleSetByID(tx *gorm.DB, scaleSetID uint, preload ...string) (ScaleSet, error) { - var scaleSet ScaleSet - q := tx.Model(&ScaleSet{}) - if len(preload) > 0 { - for _, item := range preload { - q = q.Preload(item) - } - } - - q = q.Where("id = ?", scaleSetID).First(&scaleSet) - - if q.Error != nil { - if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return ScaleSet{}, runnerErrors.ErrNotFound - } - return ScaleSet{}, fmt.Errorf("error fetching scale set from database: %w", q.Error) - } - return scaleSet, nil -} - -func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntityType, entityID string) error { +func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.GithubEntityType, entityID string) error { u, err := uuid.Parse(entityID) if err != nil { - return fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) + return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var q *gorm.DB switch entityType { - case params.ForgeEntityTypeRepository: + case params.GithubEntityTypeRepository: q = tx.Model(&Repository{}).Where("id = ?", u) - case params.ForgeEntityTypeOrganization: + case params.GithubEntityTypeOrganization: q = 
tx.Model(&Organization{}).Where("id = ?", u) - case params.ForgeEntityTypeEnterprise: + case params.GithubEntityTypeEnterprise: q = tx.Model(&Enterprise{}).Where("id = ?", u) default: - return fmt.Errorf("error invalid entity type: %w", runnerErrors.ErrBadRequest) + return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") } var entity interface{} if err := q.First(entity).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("error entity not found: %w", runnerErrors.ErrNotFound) + return errors.Wrap(runnerErrors.ErrNotFound, "entity not found") } - return fmt.Errorf("error fetching entity from database: %w", err) + return errors.Wrap(err, "fetching entity from database") } return nil } @@ -656,7 +482,7 @@ func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntity func (s *sqlDatabase) marshalAndSeal(data interface{}) ([]byte, error) { enc, err := json.Marshal(data) if err != nil { - return nil, fmt.Errorf("error marshalling data: %w", err) + return nil, errors.Wrap(err, "marshalling data") } return util.Seal(enc, []byte(s.cfg.Passphrase)) } @@ -664,10 +490,10 @@ func (s *sqlDatabase) marshalAndSeal(data interface{}) ([]byte, error) { func (s *sqlDatabase) unsealAndUnmarshal(data []byte, target interface{}) error { decrypted, err := util.Unseal(data, []byte(s.cfg.Passphrase)) if err != nil { - return fmt.Errorf("error decrypting data: %w", err) + return errors.Wrap(err, "decrypting data") } if err := json.Unmarshal(decrypted, target); err != nil { - return fmt.Errorf("error unmarshalling data: %w", err) + return errors.Wrap(err, "unmarshalling data") } return nil } @@ -687,279 +513,3 @@ func (s *sqlDatabase) sendNotify(entityType dbCommon.DatabaseEntityType, op dbCo } return s.producer.Notify(message) } - -func (s *sqlDatabase) GetForgeEntity(_ context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { - var ghEntity params.EntityGetter - var err error - switch entityType { - case params.ForgeEntityTypeEnterprise: - ghEntity, err = s.GetEnterpriseByID(s.ctx, entityID) - case params.ForgeEntityTypeOrganization: - ghEntity, err = s.GetOrganizationByID(s.ctx, entityID) - case params.ForgeEntityTypeRepository: - ghEntity, err = s.GetRepositoryByID(s.ctx, entityID) - default: - return params.ForgeEntity{}, fmt.Errorf("error invalid entity type: %w", runnerErrors.ErrBadRequest) - } - if err != nil { - return params.ForgeEntity{}, fmt.Errorf("error failed to get entity from db: %w", err) - } - - entity, err := ghEntity.GetEntity() - if err != nil { - return params.ForgeEntity{}, fmt.Errorf("error failed to get entity: %w", err) - } - return entity, nil -} - -func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - repo, err := s.getRepoByID(ctx, s.conn, repoID) - if err != nil { - return fmt.Errorf("error updating instance: %w", err) - } - - msg := RepositoryEvent{ - Message: statusMessage, - EventType: event, - EventLevel: eventLevel, - } - - if err := s.conn.Model(&repo).Association("Events").Append(&msg); err != nil { - return fmt.Errorf("error adding status message: %w", err) - } - - if maxEvents > 0 { - var latestEvents []RepositoryEvent - q := s.conn.Model(&RepositoryEvent{}). - Limit(maxEvents).Order("id desc"). 
- Where("repo_id = ?", repo.ID).Find(&latestEvents) - if q.Error != nil { - return fmt.Errorf("error fetching latest events: %w", q.Error) - } - if len(latestEvents) == maxEvents { - lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("repo_id = ? and id < ?", repo.ID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil { - return fmt.Errorf("error deleting old events: %w", err) - } - } - } - return nil -} - -func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - org, err := s.getOrgByID(ctx, s.conn, orgID) - if err != nil { - return fmt.Errorf("error updating instance: %w", err) - } - - msg := OrganizationEvent{ - Message: statusMessage, - EventType: event, - EventLevel: eventLevel, - } - - if err := s.conn.Model(&org).Association("Events").Append(&msg); err != nil { - return fmt.Errorf("error adding status message: %w", err) - } - - if maxEvents > 0 { - var latestEvents []OrganizationEvent - q := s.conn.Model(&OrganizationEvent{}). - Limit(maxEvents).Order("id desc"). - Where("org_id = ?", org.ID).Find(&latestEvents) - if q.Error != nil { - return fmt.Errorf("error fetching latest events: %w", q.Error) - } - if len(latestEvents) == maxEvents { - lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("org_id = ? and id < ?", org.ID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil { - return fmt.Errorf("error deleting old events: %w", err) - } - } - } - return nil -} - -func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - ent, err := s.getEnterpriseByID(ctx, s.conn, entID) - if err != nil { - return fmt.Errorf("error updating instance: %w", err) - } - - msg := EnterpriseEvent{ - Message: statusMessage, - EventType: event, - EventLevel: eventLevel, - } - - if err := s.conn.Model(&ent).Association("Events").Append(&msg); err != nil { - return fmt.Errorf("error adding status message: %w", err) - } - - if maxEvents > 0 { - var latestEvents []EnterpriseEvent - q := s.conn.Model(&EnterpriseEvent{}). - Limit(maxEvents).Order("id desc"). - Where("enterprise_id = ?", ent.ID).Find(&latestEvents) - if q.Error != nil { - return fmt.Errorf("error fetching latest events: %w", q.Error) - } - if len(latestEvents) == maxEvents { - lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("enterprise_id = ? 
and id < ?", ent.ID, lastInList.ID).Unscoped().Delete(&EnterpriseEvent{}).Error; err != nil { - return fmt.Errorf("error deleting old events: %w", err) - } - } - } - - return nil -} - -func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - if maxEvents == 0 { - return fmt.Errorf("max events cannot be 0: %w", runnerErrors.ErrBadRequest) - } - - switch entity.EntityType { - case params.ForgeEntityTypeRepository: - return s.addRepositoryEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) - case params.ForgeEntityTypeOrganization: - return s.addOrgEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) - case params.ForgeEntityTypeEnterprise: - return s.addEnterpriseEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) - default: - return fmt.Errorf("invalid entity type: %w", runnerErrors.ErrBadRequest) - } -} - -func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (params.ForgeCredentials, error) { - if len(creds.Payload) == 0 { - return params.ForgeCredentials{}, errors.New("empty credentials payload") - } - data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error unsealing credentials: %w", err) - } - - ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github endpoint: %w", err) - } - - commonCreds := params.ForgeCredentials{ - ID: creds.ID, - Name: creds.Name, - Description: creds.Description, - APIBaseURL: creds.Endpoint.APIBaseURL, - BaseURL: creds.Endpoint.BaseURL, - UploadBaseURL: creds.Endpoint.UploadBaseURL, - CABundle: creds.Endpoint.CACertBundle, - AuthType: creds.AuthType, - CreatedAt: creds.CreatedAt, - UpdatedAt: creds.UpdatedAt, - ForgeType: creds.Endpoint.EndpointType, - Endpoint: ep, - CredentialsPayload: data, - } - - for _, repo := range creds.Repositories { - commonRepo, err := s.sqlToCommonRepository(repo, false) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github repository: %w", err) - } - commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) - } - - for _, org := range creds.Organizations { - commonOrg, err := s.sqlToCommonOrganization(org, false) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github organization: %w", err) - } - commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) - } - - for _, ent := range creds.Enterprises { - commonEnt, err := s.sqlToCommonEnterprise(ent, false) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github enterprise %s: %w", ent.Name, err) - } - commonCreds.Enterprises = append(commonCreds.Enterprises, commonEnt) - } - - return commonCreds, nil -} - -func (s *sqlDatabase) sqlGiteaToCommonForgeCredentials(creds GiteaCredentials) (params.ForgeCredentials, error) { - if len(creds.Payload) == 0 { - return params.ForgeCredentials{}, errors.New("empty credentials payload") - } - data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error unsealing credentials: %w", err) - } - - ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github endpoint: %w", err) - } - - commonCreds := 
params.ForgeCredentials{ - ID: creds.ID, - Name: creds.Name, - Description: creds.Description, - APIBaseURL: creds.Endpoint.APIBaseURL, - BaseURL: creds.Endpoint.BaseURL, - CABundle: creds.Endpoint.CACertBundle, - AuthType: creds.AuthType, - CreatedAt: creds.CreatedAt, - UpdatedAt: creds.UpdatedAt, - ForgeType: creds.Endpoint.EndpointType, - Endpoint: ep, - CredentialsPayload: data, - } - - for _, repo := range creds.Repositories { - commonRepo, err := s.sqlToCommonRepository(repo, false) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github repository: %w", err) - } - commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) - } - - for _, org := range creds.Organizations { - commonOrg, err := s.sqlToCommonOrganization(org, false) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error converting github organization: %w", err) - } - commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) - } - - return commonCreds, nil -} - -func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.ForgeEndpoint, error) { - return params.ForgeEndpoint{ - Name: ep.Name, - Description: ep.Description, - APIBaseURL: ep.APIBaseURL, - BaseURL: ep.BaseURL, - UploadBaseURL: ep.UploadBaseURL, - CACertBundle: ep.CACertBundle, - CreatedAt: ep.CreatedAt, - EndpointType: ep.EndpointType, - UpdatedAt: ep.UpdatedAt, - }, nil -} - -func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { - userID := auth.UserID(ctx) - if userID == "" { - return uuid.Nil, fmt.Errorf("error getting UID from context: %w", runnerErrors.ErrUnauthorized) - } - - asUUID, err := uuid.Parse(userID) - if err != nil { - return uuid.Nil, fmt.Errorf("error parsing UID from context: %w", runnerErrors.ErrUnauthorized) - } - return asUUID, nil -} diff --git a/database/watcher/consumer.go b/database/watcher/consumer.go index ed0967e9..9282ece8 100644 --- a/database/watcher/consumer.go +++ b/database/watcher/consumer.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package watcher import ( diff --git a/database/watcher/filters.go b/database/watcher/filters.go index acf79ba8..af1852dc 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -1,26 +1,11 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package watcher import ( - commonParams "github.com/cloudbase/garm-provider-common/params" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) -type IDGetter interface { +type idGetter interface { GetID() string } @@ -77,7 +62,7 @@ func WithOperationTypeFilter(operationType dbCommon.OperationType) dbCommon.Payl // WithEntityPoolFilter returns true if the change payload is a pool that belongs to the // supplied Github entity. This is useful when an entity worker wants to watch for changes // in pools that belong to it. -func WithEntityPoolFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { +func WithEntityPoolFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { switch payload.EntityType { case dbCommon.PoolEntityType: @@ -86,51 +71,22 @@ func WithEntityPoolFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFun return false } switch ghEntity.EntityType { - case params.ForgeEntityTypeRepository: - return pool.RepoID == ghEntity.ID - case params.ForgeEntityTypeOrganization: - return pool.OrgID == ghEntity.ID - case params.ForgeEntityTypeEnterprise: - return pool.EnterpriseID == ghEntity.ID - default: - return false - } - default: - return false - } - } -} - -// WithEntityScaleSetFilter returns true if the change payload is a scale set that belongs to the -// supplied Github entity. -func WithEntityScaleSetFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { - return func(payload dbCommon.ChangePayload) bool { - forgeType, err := ghEntity.GetForgeType() - if err != nil { - return false - } - - // Gitea does not have scale sets. - if forgeType == params.GiteaEndpointType { - return false - } - - switch payload.EntityType { - case dbCommon.ScaleSetEntityType: - scaleSet, ok := payload.Payload.(params.ScaleSet) - if !ok { - return false - } - switch ghEntity.EntityType { - case params.ForgeEntityTypeRepository: - return scaleSet.RepoID == ghEntity.ID - case params.ForgeEntityTypeOrganization: - return scaleSet.OrgID == ghEntity.ID - case params.ForgeEntityTypeEnterprise: - return scaleSet.EnterpriseID == ghEntity.ID + case params.GithubEntityTypeRepository: + if pool.RepoID != ghEntity.ID { + return false + } + case params.GithubEntityTypeOrganization: + if pool.OrgID != ghEntity.ID { + return false + } + case params.GithubEntityTypeEnterprise: + if pool.EnterpriseID != ghEntity.ID { + return false + } default: return false } + return true default: return false } @@ -139,26 +95,26 @@ func WithEntityScaleSetFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilte // WithEntityFilter returns a filter function that filters payloads by entity. // Change payloads that match the entity type and ID will return true. 
-func WithEntityFilter(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { +func WithEntityFilter(entity params.GithubEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { - if params.ForgeEntityType(payload.EntityType) != entity.EntityType { + if params.GithubEntityType(payload.EntityType) != entity.EntityType { return false } - var ent IDGetter + var ent idGetter var ok bool switch payload.EntityType { case dbCommon.RepositoryEntityType: - if entity.EntityType != params.ForgeEntityTypeRepository { + if entity.EntityType != params.GithubEntityTypeRepository { return false } ent, ok = payload.Payload.(params.Repository) case dbCommon.OrganizationEntityType: - if entity.EntityType != params.ForgeEntityTypeOrganization { + if entity.EntityType != params.GithubEntityTypeOrganization { return false } ent, ok = payload.Payload.(params.Organization) case dbCommon.EnterpriseEntityType: - if entity.EntityType != params.ForgeEntityTypeEnterprise { + if entity.EntityType != params.GithubEntityTypeEnterprise { return false } ent, ok = payload.Payload.(params.Enterprise) @@ -172,7 +128,7 @@ func WithEntityFilter(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { } } -func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { +func WithEntityJobFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { switch payload.EntityType { case dbCommon.JobEntityType: @@ -182,15 +138,15 @@ func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc } switch ghEntity.EntityType { - case params.ForgeEntityTypeRepository: + case params.GithubEntityTypeRepository: if job.RepoID != nil && job.RepoID.String() != ghEntity.ID { return false } - case params.ForgeEntityTypeOrganization: + case params.GithubEntityTypeOrganization: if job.OrgID != nil && job.OrgID.String() != ghEntity.ID { return false } - case params.ForgeEntityTypeEnterprise: + case params.GithubEntityTypeEnterprise: if job.EnterpriseID != nil && job.EnterpriseID.String() != ghEntity.ID { return false } @@ -205,26 +161,17 @@ func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc } } -// WithForgeCredentialsFilter returns a filter function that filters payloads by Github or Gitea credentials. -func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadFilterFunc { +// WithGithubCredentialsFilter returns a filter function that filters payloads by Github credentials. +func WithGithubCredentialsFilter(creds params.GithubCredentials) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { - var forgeCreds params.ForgeCredentials - var ok bool - switch payload.EntityType { - case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: - forgeCreds, ok = payload.Payload.(params.ForgeCredentials) - default: + if payload.EntityType != dbCommon.GithubCredentialsEntityType { return false } + credsPayload, ok := payload.Payload.(params.GithubCredentials) if !ok { return false } - // Gite and Github creds have different models. The ID is uint, so we - // need to explicitly check their type, or risk a clash. 
- if forgeCreds.ForgeType != creds.ForgeType { - return false - } - return forgeCreds.GetID() == creds.GetID() + return credsPayload.ID == creds.ID } } @@ -263,76 +210,3 @@ func WithExcludeEntityTypeFilter(entityType dbCommon.DatabaseEntityType) dbCommo return payload.EntityType != entityType } } - -// WithScaleSetFilter returns a filter function that matches a particular scale set. -func WithScaleSetFilter(scaleset params.ScaleSet) dbCommon.PayloadFilterFunc { - return func(payload dbCommon.ChangePayload) bool { - if payload.EntityType != dbCommon.ScaleSetEntityType { - return false - } - - ss, ok := payload.Payload.(params.ScaleSet) - if !ok { - return false - } - - return ss.ID == scaleset.ID - } -} - -func WithScaleSetInstanceFilter(scaleset params.ScaleSet) dbCommon.PayloadFilterFunc { - return func(payload dbCommon.ChangePayload) bool { - if payload.EntityType != dbCommon.InstanceEntityType { - return false - } - - instance, ok := payload.Payload.(params.Instance) - if !ok || instance.ScaleSetID == 0 { - return false - } - - return instance.ScaleSetID == scaleset.ID - } -} - -// EntityTypeCallbackFilter is a callback function that takes a ChangePayload and returns a boolean. -// This callback type is used in the WithEntityTypeAndCallbackFilter (and potentially others) when -// a filter needs to delegate logic to a specific callback function. -type EntityTypeCallbackFilter func(payload dbCommon.ChangePayload) (bool, error) - -// WithEntityTypeAndCallbackFilter returns a filter function that filters payloads by entity type and the -// result of a callback function. -func WithEntityTypeAndCallbackFilter(entityType dbCommon.DatabaseEntityType, callback EntityTypeCallbackFilter) dbCommon.PayloadFilterFunc { - return func(payload dbCommon.ChangePayload) bool { - if payload.EntityType != entityType { - return false - } - - ok, err := callback(payload) - if err != nil { - return false - } - return ok - } -} - -func WithInstanceStatusFilter(statuses ...commonParams.InstanceStatus) dbCommon.PayloadFilterFunc { - return func(payload dbCommon.ChangePayload) bool { - if payload.EntityType != dbCommon.InstanceEntityType { - return false - } - instance, ok := payload.Payload.(params.Instance) - if !ok { - return false - } - if len(statuses) == 0 { - return false - } - for _, status := range statuses { - if instance.Status == status { - return true - } - } - return false - } -} diff --git a/database/watcher/producer.go b/database/watcher/producer.go index 927aada0..159ad843 100644 --- a/database/watcher/producer.go +++ b/database/watcher/producer.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package watcher import ( diff --git a/database/watcher/test_export.go b/database/watcher/test_export.go index eb3d38b6..f9b4ecf1 100644 --- a/database/watcher/test_export.go +++ b/database/watcher/test_export.go @@ -1,19 +1,6 @@ //go:build testing // +build testing -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package watcher import "github.com/cloudbase/garm/database/common" diff --git a/database/watcher/util_test.go b/database/watcher/util_test.go deleted file mode 100644 index 82b94491..00000000 --- a/database/watcher/util_test.go +++ /dev/null @@ -1,16 +0,0 @@ -package watcher_test - -import ( - "time" - - "github.com/cloudbase/garm/database/common" -) - -func waitForPayload(ch <-chan common.ChangePayload, timeout time.Duration) *common.ChangePayload { - select { - case payload := <-ch: - return &payload - case <-time.After(timeout): - return nil - } -} diff --git a/database/watcher/watcher.go b/database/watcher/watcher.go index 804dec70..ec81d5bd 100644 --- a/database/watcher/watcher.go +++ b/database/watcher/watcher.go @@ -1,25 +1,12 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package watcher import ( "context" - "fmt" "log/slog" "sync" + "github.com/pkg/errors" + "github.com/cloudbase/garm/database/common" garmUtil "github.com/cloudbase/garm/util" ) @@ -30,7 +17,7 @@ func InitWatcher(ctx context.Context) { if databaseWatcher != nil { return } - ctx = garmUtil.WithSlogContext(ctx, slog.Any("watcher", "database")) + ctx = garmUtil.WithContext(ctx, slog.Any("watcher", "database")) w := &watcher{ producers: make(map[string]*producer), consumers: make(map[string]*consumer), @@ -42,20 +29,11 @@ func InitWatcher(ctx context.Context) { databaseWatcher = w } -func CloseWatcher() error { - if databaseWatcher == nil { - return nil - } - databaseWatcher.Close() - databaseWatcher = nil - return nil -} - func RegisterProducer(ctx context.Context, id string) (common.Producer, error) { if databaseWatcher == nil { return nil, common.ErrWatcherNotInitialized } - ctx = garmUtil.WithSlogContext(ctx, slog.Any("producer_id", id)) + ctx = garmUtil.WithContext(ctx, slog.Any("producer_id", id)) return databaseWatcher.RegisterProducer(ctx, id) } @@ -63,7 +41,7 @@ func RegisterConsumer(ctx context.Context, id string, filters ...common.PayloadF if databaseWatcher == nil { return nil, common.ErrWatcherNotInitialized } - ctx = garmUtil.WithSlogContext(ctx, slog.Any("consumer_id", id)) + ctx = garmUtil.WithContext(ctx, slog.Any("consumer_id", id)) return databaseWatcher.RegisterConsumer(ctx, id, filters...) } @@ -82,7 +60,7 @@ func (w *watcher) RegisterProducer(ctx context.Context, id string) (common.Produ defer w.mux.Unlock() if _, ok := w.producers[id]; ok { - return nil, fmt.Errorf("producer_id %s: %w", id, common.ErrProducerAlreadyRegistered) + return nil, errors.Wrapf(common.ErrProducerAlreadyRegistered, "producer_id: %s", id) } p := &producer{ id: id, diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 97fc8a9d..a0845b9c 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package watcher_test import ( @@ -50,12 +36,12 @@ func (s *WatcherStoreTestSuite) TestJobWatcher() { consumeEvents(consumer) jobParams := params.Job{ - WorkflowJobID: 2, - RunID: 2, - Action: "test-action", - Conclusion: "started", - Status: "in_progress", - Name: "test-job", + ID: 1, + RunID: 2, + Action: "test-action", + Conclusion: "started", + Status: "in_progress", + Name: "test-job", } job, err := s.store.CreateOrUpdateJob(s.ctx, jobParams) @@ -76,8 +62,8 @@ func (s *WatcherStoreTestSuite) TestJobWatcher() { s.T().Fatal("expected payload not received") } - job.Conclusion = "success" - updatedJob, err := s.store.CreateOrUpdateJob(s.ctx, job) + jobParams.Conclusion = "success" + updatedJob, err := s.store.CreateOrUpdateJob(s.ctx, jobParams) s.Require().NoError(err) select { @@ -94,7 +80,7 @@ func (s *WatcherStoreTestSuite) TestJobWatcher() { entityID, err := uuid.NewUUID() s.Require().NoError(err) - err = s.store.LockJob(s.ctx, updatedJob.WorkflowJobID, entityID.String()) + err = s.store.LockJob(s.ctx, updatedJob.ID, entityID.String()) s.Require().NoError(err) select { @@ -110,7 +96,7 @@ func (s *WatcherStoreTestSuite) TestJobWatcher() { s.T().Fatal("expected payload not received") } - err = s.store.UnlockJob(s.ctx, updatedJob.WorkflowJobID, entityID.String()) + err = s.store.UnlockJob(s.ctx, updatedJob.ID, entityID.String()) s.Require().NoError(err) select { @@ -134,7 +120,7 @@ func (s *WatcherStoreTestSuite) TestJobWatcher() { // We don't care about the update event here. consumeEvents(consumer) - err = s.store.BreakLockJobIsQueued(s.ctx, updatedJob.WorkflowJobID) + err = s.store.BreakLockJobIsQueued(s.ctx, updatedJob.ID) s.Require().NoError(err) select { @@ -169,7 +155,7 @@ func (s *WatcherStoreTestSuite) TestInstanceWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -255,112 +241,6 @@ func (s *WatcherStoreTestSuite) TestInstanceWatcher() { } } -func (s *WatcherStoreTestSuite) TestScaleSetInstanceWatcher() { - consumer, err := watcher.RegisterConsumer( - s.ctx, "instance-test", - watcher.WithEntityTypeFilter(common.InstanceEntityType), - watcher.WithAny( - watcher.WithOperationTypeFilter(common.CreateOperation), - watcher.WithOperationTypeFilter(common.UpdateOperation), - watcher.WithOperationTypeFilter(common.DeleteOperation)), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - s.T().Cleanup(func() { consumer.Close() }) - consumeEvents(consumer) - - ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.store, s.T()) - creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) - s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - s.Require().NotEmpty(repo.ID) - s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) - - entity, err := repo.GetEntity() - s.Require().NoError(err) - - createScaleSetParams := params.CreateScaleSetParams{ - 
ProviderName: "test-provider", - Name: "test-scaleset", - Image: "test-image", - Flavor: "test-flavor", - MinIdleRunners: 0, - MaxRunners: 1, - OSType: commonParams.Linux, - OSArch: commonParams.Amd64, - } - - scaleSet, err := s.store.CreateEntityScaleSet(s.ctx, entity, createScaleSetParams) - s.Require().NoError(err) - s.Require().NotEmpty(scaleSet.ID) - s.T().Cleanup(func() { s.store.DeleteScaleSetByID(s.ctx, scaleSet.ID) }) - - createInstanceParams := params.CreateInstanceParams{ - Name: "test-instance", - OSType: commonParams.Linux, - OSArch: commonParams.Amd64, - Status: commonParams.InstanceCreating, - } - instance, err := s.store.CreateScaleSetInstance(s.ctx, scaleSet.ID, createInstanceParams) - s.Require().NoError(err) - s.Require().NotEmpty(instance.ID) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.CreateOperation, - Payload: instance, - }, event) - asInstance, ok := event.Payload.(params.Instance) - s.Require().True(ok) - s.Require().Equal(instance.Name, "test-instance") - s.Require().Equal(asInstance.Name, "test-instance") - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - updateParams := params.UpdateInstanceParams{ - RunnerStatus: params.RunnerActive, - } - - updatedInstance, err := s.store.UpdateInstance(s.ctx, instance.Name, updateParams) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.UpdateOperation, - Payload: updatedInstance, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - err = s.store.DeleteInstanceByName(s.ctx, updatedInstance.Name) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.DeleteOperation, - Payload: params.Instance{ - ID: updatedInstance.ID, - Name: updatedInstance.Name, - ProviderID: updatedInstance.ProviderID, - AgentID: updatedInstance.AgentID, - ScaleSetID: updatedInstance.ScaleSetID, - }, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } -} - func (s *WatcherStoreTestSuite) TestPoolWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "pool-test", @@ -383,7 +263,7 @@ func (s *WatcherStoreTestSuite) TestPoolWatcher() { } }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -482,139 +362,6 @@ func (s *WatcherStoreTestSuite) TestPoolWatcher() { } } -func (s *WatcherStoreTestSuite) TestScaleSetWatcher() { - consumer, err := watcher.RegisterConsumer( - s.ctx, "scaleset-test", - watcher.WithEntityTypeFilter(common.ScaleSetEntityType), - watcher.WithAny( - watcher.WithOperationTypeFilter(common.CreateOperation), - watcher.WithOperationTypeFilter(common.UpdateOperation), - watcher.WithOperationTypeFilter(common.DeleteOperation)), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - s.T().Cleanup(func() { consumer.Close() }) - consumeEvents(consumer) - - ep := 
garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.store, s.T()) - creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) - s.T().Cleanup(func() { - if err := s.store.DeleteGithubCredentials(s.ctx, creds.ID); err != nil { - s.T().Logf("failed to delete Github credentials: %v", err) - } - }) - - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - s.Require().NotEmpty(repo.ID) - s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) - - entity, err := repo.GetEntity() - s.Require().NoError(err) - - createScaleSetParams := params.CreateScaleSetParams{ - ProviderName: "test-provider", - Name: "test-scaleset", - Image: "test-image", - Flavor: "test-flavor", - MinIdleRunners: 0, - MaxRunners: 1, - OSType: commonParams.Linux, - OSArch: commonParams.Amd64, - Tags: []string{"test-tag"}, - } - scaleSet, err := s.store.CreateEntityScaleSet(s.ctx, entity, createScaleSetParams) - s.Require().NoError(err) - s.Require().NotEmpty(scaleSet.ID) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.CreateOperation, - Payload: scaleSet, - }, event) - asScaleSet, ok := event.Payload.(params.ScaleSet) - s.Require().True(ok) - s.Require().Equal(scaleSet.Image, "test-image") - s.Require().Equal(asScaleSet.Image, "test-image") - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - updateParams := params.UpdateScaleSetParams{ - Flavor: "updated-flavor", - } - - callbackFn := func(old, newScaleSet params.ScaleSet) error { - s.Require().Equal(old.ID, newScaleSet.ID) - s.Require().Equal(old.Flavor, "test-flavor") - s.Require().Equal(newScaleSet.Flavor, "updated-flavor") - return nil - } - updatedScaleSet, err := s.store.UpdateEntityScaleSet(s.ctx, entity, scaleSet.ID, updateParams, callbackFn) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: updatedScaleSet, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - err = s.store.SetScaleSetLastMessageID(s.ctx, updatedScaleSet.ID, 99) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - asScaleSet, ok := event.Payload.(params.ScaleSet) - s.Require().True(ok) - s.Require().Equal(asScaleSet.ID, updatedScaleSet.ID) - s.Require().Equal(asScaleSet.LastMessageID, int64(99)) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - err = s.store.SetScaleSetDesiredRunnerCount(s.ctx, updatedScaleSet.ID, 5) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - asScaleSet, ok := event.Payload.(params.ScaleSet) - s.Require().True(ok) - s.Require().Equal(asScaleSet.ID, updatedScaleSet.ID) - s.Require().Equal(asScaleSet.DesiredRunnerCount, 5) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - err = s.store.DeleteScaleSetByID(s.ctx, scaleSet.ID) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - // We updated last message ID and desired runner count above. 
- updatedScaleSet.DesiredRunnerCount = 5 - updatedScaleSet.LastMessageID = 99 - payloadFromEvent, ok := event.Payload.(params.ScaleSet) - s.Require().True(ok) - updatedScaleSet.UpdatedAt = payloadFromEvent.UpdatedAt - updatedScaleSet.CreatedAt = payloadFromEvent.CreatedAt - updatedScaleSet.Endpoint = params.ForgeEndpoint{} - s.Require().Equal(common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.DeleteOperation, - Payload: updatedScaleSet, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } -} - func (s *WatcherStoreTestSuite) TestControllerWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "controller-test", @@ -665,7 +412,7 @@ func (s *WatcherStoreTestSuite) TestEnterpriseWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - ent, err := s.store.CreateEnterprise(s.ctx, "test-enterprise", creds, "test-secret", params.PoolBalancerTypeRoundRobin) + ent, err := s.store.CreateEnterprise(s.ctx, "test-enterprise", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(ent.ID) @@ -732,7 +479,7 @@ func (s *WatcherStoreTestSuite) TestOrgWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - org, err := s.store.CreateOrganization(s.ctx, "test-org", creds, "test-secret", params.PoolBalancerTypeRoundRobin) + org, err := s.store.CreateOrganization(s.ctx, "test-org", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(org.ID) @@ -799,7 +546,7 @@ func (s *WatcherStoreTestSuite) TestRepoWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) @@ -867,7 +614,7 @@ func (s *WatcherStoreTestSuite) TestGithubCredentialsWatcher() { Name: "test-creds", Description: "test credentials", Endpoint: "github.com", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "bogus", }, @@ -917,105 +664,13 @@ func (s *WatcherStoreTestSuite) TestGithubCredentialsWatcher() { EntityType: common.GithubCredentialsEntityType, Operation: common.DeleteOperation, // We only get the ID and Name of the deleted entity - Payload: params.ForgeCredentials{ID: ghCred.ID, Name: ghCred.Name}, + Payload: params.GithubCredentials{ID: ghCred.ID, Name: ghCred.Name}, }, event) case <-time.After(1 * time.Second): s.T().Fatal("expected payload not received") } } -func (s *WatcherStoreTestSuite) TestGiteaCredentialsWatcher() { - consumer, err := watcher.RegisterConsumer( - s.ctx, "gitea-cred-test", - watcher.WithEntityTypeFilter(common.GiteaCredentialsEntityType), - watcher.WithAny( - watcher.WithOperationTypeFilter(common.CreateOperation), - watcher.WithOperationTypeFilter(common.UpdateOperation), - watcher.WithOperationTypeFilter(common.DeleteOperation)), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - 
s.T().Cleanup(func() { consumer.Close() }) - consumeEvents(consumer) - - testEndpointParams := params.CreateGiteaEndpointParams{ - Name: "test", - Description: "test endpoint", - APIBaseURL: "https://api.gitea.example.com", - BaseURL: "https://gitea.example.com", - } - - testEndpoint, err := s.store.CreateGiteaEndpoint(s.ctx, testEndpointParams) - s.Require().NoError(err) - s.Require().NotEmpty(testEndpoint.Name) - - s.T().Cleanup(func() { - if err := s.store.DeleteGiteaEndpoint(s.ctx, testEndpoint.Name); err != nil { - s.T().Logf("failed to delete Gitea endpoint: %v", err) - } - consumeEvents(consumer) - }) - - giteaCredParams := params.CreateGiteaCredentialsParams{ - Name: "test-creds", - Description: "test credentials", - Endpoint: testEndpoint.Name, - AuthType: params.ForgeAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "bogus", - }, - } - - giteaCred, err := s.store.CreateGiteaCredentials(s.ctx, giteaCredParams) - s.Require().NoError(err) - s.Require().NotEmpty(giteaCred.ID) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.GiteaCredentialsEntityType, - Operation: common.CreateOperation, - Payload: giteaCred, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - newDesc := "updated test description" - updateParams := params.UpdateGiteaCredentialsParams{ - Description: &newDesc, - } - - updatedGiteaCred, err := s.store.UpdateGiteaCredentials(s.ctx, giteaCred.ID, updateParams) - s.Require().NoError(err) - s.Require().Equal(newDesc, updatedGiteaCred.Description) - - select { - case event := <-consumer.Watch(): - s.Require().Equal(common.ChangePayload{ - EntityType: common.GiteaCredentialsEntityType, - Operation: common.UpdateOperation, - Payload: updatedGiteaCred, - }, event) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } - - err = s.store.DeleteGiteaCredentials(s.ctx, giteaCred.ID) - s.Require().NoError(err) - - select { - case event := <-consumer.Watch(): - asCreds, ok := event.Payload.(params.ForgeCredentials) - s.Require().True(ok) - s.Require().Equal(event.Operation, common.DeleteOperation) - s.Require().Equal(event.EntityType, common.GiteaCredentialsEntityType) - s.Require().Equal(asCreds.ID, updatedGiteaCred.ID) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } -} - func (s *WatcherStoreTestSuite) TestGithubEndpointWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "gh-ep-test", @@ -1082,7 +737,7 @@ func (s *WatcherStoreTestSuite) TestGithubEndpointWatcher() { EntityType: common.GithubEndpointEntityType, Operation: common.DeleteOperation, // We only get the name of the deleted entity - Payload: params.ForgeEndpoint{Name: ghEp.Name}, + Payload: params.GithubEndpoint{Name: ghEp.Name}, }, event) case <-time.After(1 * time.Second): s.T().Fatal("expected payload not received") @@ -1093,12 +748,9 @@ func consumeEvents(consumer common.Consumer) { consume: for { select { - case _, ok := <-consumer.Watch(): + case <-consumer.Watch(): // throw away event. 
- if !ok { - return - } - case <-time.After(20 * time.Millisecond): + case <-time.After(100 * time.Millisecond): break consume } } diff --git a/database/watcher/watcher_test.go b/database/watcher/watcher_test.go index fcbcc4eb..c5b56fe2 100644 --- a/database/watcher/watcher_test.go +++ b/database/watcher/watcher_test.go @@ -1,35 +1,19 @@ //go:build testing -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package watcher_test import ( "context" - "fmt" "testing" "time" - "github.com/google/uuid" + "github.com/pkg/errors" "github.com/stretchr/testify/suite" - commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/database" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" - "github.com/cloudbase/garm/params" ) type WatcherTestSuite struct { @@ -53,7 +37,6 @@ func (s *WatcherTestSuite) TearDownTest() { currentWatcher := watcher.GetWatcher() if currentWatcher != nil { currentWatcher.Close() - watcher.SetWatcher(nil) } } @@ -61,7 +44,6 @@ func (s *WatcherTestSuite) TestRegisterConsumerTwiceWillError() { consumer, err := watcher.RegisterConsumer(s.ctx, "test") s.Require().NoError(err) s.Require().NotNil(consumer) - consumeEvents(consumer) consumer, err = watcher.RegisterConsumer(s.ctx, "test") s.Require().ErrorIs(err, common.ErrConsumerAlreadyRegistered) @@ -118,7 +100,6 @@ func (s *WatcherTestSuite) TestProducerAndConsumer() { watcher.WithOperationTypeFilter(common.UpdateOperation)) s.Require().NoError(err) s.Require().NotNil(consumer) - consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -132,7 +113,7 @@ func (s *WatcherTestSuite) TestProducerAndConsumer() { s.Require().Equal(payload, receivedPayload) } -func (s *WatcherTestSuite) TestConsumeWithFilter() { +func (s *WatcherTestSuite) TestConsumetWithFilter() { producer, err := watcher.RegisterProducer(s.ctx, "test-producer") s.Require().NoError(err) s.Require().NotNil(producer) @@ -143,7 +124,6 @@ func (s *WatcherTestSuite) TestConsumeWithFilter() { watcher.WithOperationTypeFilter(common.UpdateOperation)) s.Require().NoError(err) s.Require().NotNil(consumer) - consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -153,9 +133,12 @@ func (s *WatcherTestSuite) TestConsumeWithFilter() { err = producer.Notify(payload) s.Require().NoError(err) - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) + select { + case receivedPayload := <-consumer.Watch(): + s.Require().Equal(payload, receivedPayload) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } payload = common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -165,143 +148,11 @@ func (s *WatcherTestSuite) TestConsumeWithFilter() { err = producer.Notify(payload) 
s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithAnyFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithAny( - watcher.WithEntityTypeFilter(common.ControllerEntityType), - watcher.WithEntityFilter(params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - }), - )) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ControllerEntityType, - Operation: common.UpdateOperation, - Payload: "test", + select { + case <-consumer.Watch(): + s.T().Fatal("unexpected payload received") + case <-time.After(1 * time.Second): } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - Owner: "test", - Name: "test", - ID: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - // We're not watching for this repo - payload = common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - Owner: "test", - Name: "test", - ID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - // We're not watching for orgs - payload = common.ChangePayload{ - EntityType: common.OrganizationEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - Owner: "test", - Name: "test", - ID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithAllFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithAll( - watcher.WithEntityFilter(params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - }), - watcher.WithOperationTypeFilter(common.CreateOperation), - )) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.CreateOperation, - Payload: params.Repository{ - Owner: "test", - Name: "test", - ID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - 
Payload: params.Repository{ - Owner: "test", - Name: "test", - ID: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) } func maybeInitController(db common.Store) error { @@ -310,1155 +161,12 @@ func maybeInitController(db common.Store) error { } if _, err := db.InitController(); err != nil { - return fmt.Errorf("error initializing controller: %w", err) + return errors.Wrap(err, "initializing controller") } return nil } -func (s *WatcherTestSuite) TestWithEntityPoolFilterRepository() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityPoolFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - RepoID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - RepoID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityPoolFilterOrg() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeOrganization, - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityPoolFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - OrgID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - OrgID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityPoolFilterEnterprise() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeEnterprise, - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityPoolFilter(entity), - ) - s.Require().NoError(err) - 
s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - EnterpriseID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - EnterpriseID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - // Invalid payload for declared entity type - payload = common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - EnterpriseID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityPoolFilterBogusEntityType() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - // This should trigger the default branch in the filter and - // return false - EntityType: params.ForgeEntityType("bogus"), - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityPoolFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - EnterpriseID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.PoolEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{ - ID: "test", - EnterpriseID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityScaleSetFilterRepository() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - Credentials: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - }, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityScaleSetFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - RepoID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - 
payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - RepoID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityScaleSetFilterOrg() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeOrganization, - Name: "test", - ID: "test", - Credentials: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - }, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityScaleSetFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - OrgID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - OrgID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityScaleSetFilterEnterprise() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeEnterprise, - Name: "test", - ID: "test", - Credentials: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - }, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityScaleSetFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - EnterpriseID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - EnterpriseID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityScaleSetFilterBogusEntityType() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - // This should trigger the default branch in the filter and - // return false - EntityType: params.ForgeEntityType("bogus"), - Name: "test", - ID: "test", - Credentials: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - 
}, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityScaleSetFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - EnterpriseID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - EnterpriseID: "test2", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityScaleSetFilterReturnsFalseForGiteaEndpoints() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - Credentials: params.ForgeCredentials{ - ForgeType: params.GiteaEndpointType, - }, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityScaleSetFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - RepoID: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityFilterRepository() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - ID: "test", - Name: "test", - Owner: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - ID: "test2", - Name: "test", - Owner: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityFilterOrg() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeOrganization, - Name: "test", - ID: "test", - } - 
- consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.OrganizationEntityType, - Operation: common.UpdateOperation, - Payload: params.Organization{ - ID: "test", - Name: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.OrganizationEntityType, - Operation: common.UpdateOperation, - Payload: params.Organization{ - ID: "test2", - Name: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityFilterEnterprise() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeEnterprise, - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.EnterpriseEntityType, - Operation: common.UpdateOperation, - Payload: params.Enterprise{ - ID: "test", - Name: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.EnterpriseEntityType, - Operation: common.UpdateOperation, - Payload: params.Enterprise{ - ID: "test2", - Name: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityJobFilterRepository() { - repoUUID, err := uuid.NewUUID() - s.Require().NoError(err) - - repoUUID2, err := uuid.NewUUID() - s.Require().NoError(err) - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeRepository, - Owner: "test", - Name: "test", - ID: repoUUID.String(), - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityJobFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - RepoID: &repoUUID, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - RepoID: &repoUUID2, - }, - } - - err = producer.Notify(payload) - 
s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityJobFilterOrg() { - orgUUID, err := uuid.NewUUID() - s.Require().NoError(err) - - orgUUID2, err := uuid.NewUUID() - s.Require().NoError(err) - - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeOrganization, - Name: "test", - ID: orgUUID.String(), - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityJobFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - OrgID: &orgUUID, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - OrgID: &orgUUID2, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityJobFilterEnterprise() { - entUUID, err := uuid.NewUUID() - s.Require().NoError(err) - - entUUID2, err := uuid.NewUUID() - s.Require().NoError(err) - - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - EntityType: params.ForgeEntityTypeEnterprise, - Name: "test", - ID: entUUID.String(), - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityJobFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - EnterpriseID: &entUUID, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - EnterpriseID: &entUUID2, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithEntityJobFilterBogusEntityType() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - entity := params.ForgeEntity{ - // This should trigger the default branch in the filter and - // return false - EntityType: params.ForgeEntityType("bogus"), - Name: "test", - ID: "test", - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithEntityJobFilter(entity), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - 
consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - EnterpriseID: nil, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.JobEntityType, - Operation: common.UpdateOperation, - Payload: params.Job{ - ID: 1, - Name: "test", - EnterpriseID: nil, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithNone() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithNone(), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - ID: "test", - Name: "test", - Owner: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithUserIDFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - userID, err := uuid.NewUUID() - s.Require().NoError(err) - - userID2, err := uuid.NewUUID() - s.Require().NoError(err) - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithUserIDFilter(userID.String()), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.UserEntityType, - Operation: common.UpdateOperation, - Payload: params.User{ - ID: userID.String(), - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.UserEntityType, - Operation: common.UpdateOperation, - Payload: params.User{ - ID: userID2.String(), - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.UserEntityType, - Operation: common.UpdateOperation, - // Declare as user, but payload is a pool. Filter should return false. 
- Payload: params.Pool{}, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithForgeCredentialsGithub() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - creds := params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - ID: 1, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithForgeCredentialsFilter(creds), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.GithubCredentialsEntityType, - Operation: common.UpdateOperation, - Payload: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - ID: 1, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.GiteaCredentialsEntityType, - Operation: common.UpdateOperation, - Payload: params.ForgeCredentials{ - ForgeType: params.GiteaEndpointType, - ID: 1, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.GiteaCredentialsEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{}, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithcaleSetFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - scaleSet := params.ScaleSet{ - ID: 1, - } - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithScaleSetFilter(scaleSet), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 1, - Name: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.ScaleSet{ - ID: 2, - Name: "test", - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.ScaleSetEntityType, - Operation: common.UpdateOperation, - Payload: params.Pool{}, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) -} - -func (s *WatcherTestSuite) TestWithExcludeEntityTypeFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - consumer, err 
:= watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithExcludeEntityTypeFilter(common.RepositoryEntityType), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.RepositoryEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - ID: "test", - Name: "test", - Owner: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.OrganizationEntityType, - Operation: common.UpdateOperation, - Payload: params.Repository{ - ID: "test", - Name: "test", - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) -} - -func (s *WatcherTestSuite) TestWithInstanceStatusFilter() { - producer, err := watcher.RegisterProducer(s.ctx, "test-producer") - s.Require().NoError(err) - s.Require().NotNil(producer) - - consumer, err := watcher.RegisterConsumer( - s.ctx, "test-consumer", - watcher.WithInstanceStatusFilter( - commonParams.InstanceCreating, - commonParams.InstanceDeleting), - ) - s.Require().NoError(err) - s.Require().NotNil(consumer) - consumeEvents(consumer) - - payload := common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.UpdateOperation, - Payload: params.Instance{ - ID: "test-instance", - Status: commonParams.InstanceCreating, - }, - } - err = producer.Notify(payload) - s.Require().NoError(err) - - receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.UpdateOperation, - Payload: params.Instance{ - ID: "test-instance", - Status: commonParams.InstanceDeleted, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().Nil(receivedPayload) - - payload = common.ChangePayload{ - EntityType: common.InstanceEntityType, - Operation: common.UpdateOperation, - Payload: params.Instance{ - ID: "test-instance", - Status: commonParams.InstanceDeleting, - }, - } - - err = producer.Notify(payload) - s.Require().NoError(err) - receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) - s.Require().NotNil(receivedPayload) - s.Require().Equal(payload, *receivedPayload) -} - func TestWatcherTestSuite(t *testing.T) { // Watcher tests watcherSuite := &WatcherTestSuite{ diff --git a/doc/building_from_source.md b/doc/building_from_source.md index e5d2d0fd..9058820e 100644 --- a/doc/building_from_source.md +++ b/doc/building_from_source.md @@ -6,13 +6,12 @@ First, clone the repository: ```bash git clone https://github.com/cloudbase/garm -cd garm ``` Then build garm: ```bash -make build +make ``` You should now have both `garm` and `garm-cli` available in the `./bin` folder. @@ -23,65 +22,4 @@ If you have docker/podman installed, you can also build a static binary against make build-static ``` -This command will also build for both AMD64 and ARM64. Resulting binaries will be in the `./bin` folder. 
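If you want a quick sanity check of the result, something along these lines should work (a rough sketch; it assumes the binaries land directly under `./bin` as described above — exact file names may differ on your checkout):

```bash
# Inspect the resulting binaries. A static build is expected to report
# "statically linked" here; "ldd" on a static binary prints
# "not a dynamic executable" and exits non-zero, hence the "|| true".
file bin/garm bin/garm-cli
ldd bin/garm || true
```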
-
-## Hacking
-
-If you're hacking on GARM and want to override the default version GARM injects, you can run the following command:
-
-```bash
-VERSION=v1.0.0 make build
-```
-
-> [!IMPORTANT]
-> This only works for `make build`. The `make build-static` command does not support version overrides.
-
-## The Web UI SPA
-
-GARM now ships with a single page application. The application is written in Svelte and Tailwind CSS. To rebuild it or hack on it, you will need a number of dependencies installed and placed in your `$PATH`.
-
-### Prerequisites
-
-- **Node.js 24+** and **npm**
-- **Go 1.21+** (for building the GARM backend)
-- **openapi-generator-cli** in your PATH (for API client generation)
-
-### Installing openapi-generator-cli
-
-**Option 1: NPM Global Install**
-```bash
-npm install -g @openapitools/openapi-generator-cli
-```
-
-**Option 2: Manual Install**
-Download from [OpenAPI Generator releases](https://github.com/OpenAPITools/openapi-generator/releases) and add to your PATH.
-
-**Verify Installation:**
-
-```bash
-openapi-generator-cli version
-```
-
-
-
-### Hacking on the Web UI
-
-If you need to change something in the `webapp/src` folder, make sure to rebuild the webapp before rebuilding GARM:
-
-```bash
-make build-webui
-make build
-```
-
-> [!IMPORTANT]
-> The Web UI that GARM ships with has `go generate` stanzas that require `@openapitools/openapi-generator-cli` and `tailwindcss` to be installed. You will also have to make sure that if you change API models, the Web UI still works, as adding new fields or changing the json tags of old fields will change accessors in the client code.
-
-### Changing API models
-
-If you need to change the models in the `params/` package, you will also need to regenerate the client both for garm-cli and for the web application we ship with GARM. To do this, you can run:
-
-```bash
-make generate
-```
-
-You will also need to make sure that the web app still works.
+This command will also build for both AMD64 and ARM64. Resulting binaries will be in the `./bin` folder.
\ No newline at end of file
diff --git a/doc/config.md b/doc/config.md
index 3c67e1b4..8b4d3a05 100644
--- a/doc/config.md
+++ b/doc/config.md
@@ -473,8 +473,6 @@ The config options are fairly straight forward.
     certificate = ""
     # The path on disk to the corresponding private key for the certificate.
     key = ""
-  [apiserver.webui]
-    enable = true
 ```
 
 The GARM API server has the option to enable TLS, but I suggest you use a reverse proxy and enable TLS termination in that reverse proxy. There is an `nginx` sample in this repository with TLS termination enabled.
diff --git a/doc/events.md b/doc/events.md
index e643a5c2..6bc61a9d 100644
--- a/doc/events.md
+++ b/doc/events.md
@@ -88,9 +88,7 @@ The filter is defined as a JSON that you write over the websocket connections. T
         "job",
         "controller",
         "github_credentials",
-        "gitea_credentials",
-        "github_endpoint",
-        "scaleset"
+        "github_endpoint"
       ],
       "title": "entity type",
       "description": "The type of entity to filter on",
diff --git a/doc/gitea.md b/doc/gitea.md
deleted file mode 100644
index 72d3a202..00000000
--- a/doc/gitea.md
+++ /dev/null
@@ -1,358 +0,0 @@
-# Using GARM with Gitea
-
-Starting with Gitea 1.24 and the latest version of GARM (upcoming v0.2.0 - currently `main`), GARM supports Gitea as a forge, side by side with GitHub/GHES. A new endpoint type has been added to represent Gitea instances, which you can configure and use alongside your GitHub runners.
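As a rough sketch of what that setup looks like from the CLI — the command and flag names below are assumptions modeled on the GitHub equivalents, not authoritative, so check `garm-cli gitea --help` on your build — the endpoint fields mirror the `CreateGiteaEndpointParams` exercised by the watcher tests earlier in this patch:

```bash
# Hypothetical sketch: register a Gitea endpoint, then PAT credentials for it.
# Flag names are illustrative; verify them against your garm-cli version.
garm-cli gitea endpoint create \
    --name gitea \
    --base-url https://gitea.example.com \
    --api-base-url https://api.gitea.example.com

garm-cli gitea credentials add \
    --name gitea-creds \
    --endpoint gitea \
    --auth-type pat \
    --pat-oauth-token "$GITEA_PAT"
```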
- -You can essentially create runners for both GitHub and Gitea using the same GARM instance, using the same CLI and the same API. It's simply a matter of adding an endpoint and credentials. The rest is the same as for github. - -## Quickstart - -This is for testing purposes only. We'll assume you're running on an Ubuntu 24.04 VM or server. You can use anything you'd like, but this quickstart is tailored to get you up and running with the LXD provider. So we'll: - -* Initialize LXD -* Create a docker compose yaml -* Deploy Gitea and GARM -* Configure GARM to use Gitea - -You will have to install Docker-CE yourself. - -### Initialize LXD - -If you already have LXD initialized, you can skip this step. Otherwise, simply run: - -```bash -sudo lxd init --auto -``` - -This should set up LXD with default settings that should work on any system. - -LXD and Docker sometimes have issues with networking due to some conflicting iptables rules. In most cases, if you have docker installed and notice that you don't have access to the outside world from the containers, run the following command: - -```bash -sudo iptables -I DOCKER-USER -j ACCEPT -``` - -### Create the docker compose - -Create a docker compose file in `$HOME/compose.yaml`. This docker compose will deploy both gitea and GARM. If you already have a Gitea >=1.24.0, you can edit this docker compose to only deploy GARM. - -```yaml -networks: - default: - external: false - -services: - gitea: - image: docker.gitea.com/gitea:1.24.0-rc0 - container_name: gitea - environment: - - USER_UID=1000 - - USER_GID=1000 - restart: always - networks: - - default - volumes: - - /etc/gitea/gitea:/data - - /etc/timezone:/etc/timezone:ro - - /etc/localtime:/etc/localtime:ro - ports: - - "80:80" - - "22:22" - garm: - image: ghcr.io/cloudbase/garm:${GARM_VERSION:-nightly} - container_name: garm - environment: - - USER_UID=1000 - - USER_GID=1000 - restart: always - networks: - - default - volumes: - - /etc/garm:/etc/garm - - /etc/timezone:/etc/timezone:ro - - /etc/localtime:/etc/localtime:ro - # Give GARM access to the LXD socket. We need this later in the LXD provider. 
- - /var/snap/lxd/common/lxd/unix.socket:/var/snap/lxd/common/lxd/unix.socket - ports: - - "9997:9997" -``` - -Create the folders for Gitea and GARM: - -```bash -sudo mkdir -p /etc/gitea /etc/garm -sudo chown 1000:1000 /etc/gitea /etc/garm -``` - -Create the GARM configuration file: - -```bash - -sudo tee /etc/garm/config.toml < - - - - - - - - - - diff --git a/doc/images/garm-light.diagram.svg b/doc/images/garm-light.drawio.svg similarity index 100% rename from doc/images/garm-light.diagram.svg rename to doc/images/garm-light.drawio.svg diff --git a/doc/images/garm-light.svg b/doc/images/garm-light.svg deleted file mode 100644 index 2495959d..00000000 --- a/doc/images/garm-light.svg +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - diff --git a/doc/quickstart.md b/doc/quickstart.md index 889f799b..a2016cbc 100644 --- a/doc/quickstart.md +++ b/doc/quickstart.md @@ -2,18 +2,19 @@ - - [Create the config folder](#create-the-config-folder) - - [The config file](#the-config-file) - - [The provider section](#the-provider-section) - - [Starting the service](#starting-the-service) - - [Using Docker](#using-docker) - - [Setting up GARM as a system service](#setting-up-garm-as-a-system-service) - - [Initializing GARM](#initializing-garm) - - [Setting up the webhook](#setting-up-the-webhook) - - [Creating a GitHub endpoint Optional](#creating-a-github-endpoint-optional) - - [Adding credentials](#adding-credentials) - - [Define a repo](#define-a-repo) - - [Create a pool](#create-a-pool) +- [Quick start](#quick-start) + - [Create the config folder](#create-the-config-folder) + - [The config file](#the-config-file) + - [The provider section](#the-provider-section) + - [Starting the service](#starting-the-service) + - [Using Docker](#using-docker) + - [Setting up GARM as a system service](#setting-up-garm-as-a-system-service) + - [Initializing GARM](#initializing-garm) + - [Setting up the webhook](#setting-up-the-webhook) + - [Creating a GitHub endpoint Optional](#creating-a-github-endpoint-optional) + - [Adding credentials](#adding-credentials) + - [Define a repo](#define-a-repo) + - [Create a pool](#create-a-pool) @@ -61,9 +62,6 @@ time_to_live = "8760h" bind = "0.0.0.0" port = 80 use_tls = false - [apiserver.webui] - # Set this to false if you want to disable the Web UI. 
-  enable = true
 
 [database]
   backend = "sqlite3"
@@ -505,7 +503,7 @@ gabriel@rossak:~$ garm-cli pool add \
 If we list the pool we should see it:
 
 ```bash
-gabriel@rock:~$ garm-cli pool ls
+gabriel@rock:~$ garm-cli pool ls -a
 +--------------------------------------+---------------------------+--------------+-----------------+------------------+-------+---------+---------------+----------+
 | ID | IMAGE | FLAVOR | TAGS | BELONGS TO | LEVEL | ENABLED | RUNNER PREFIX | PRIORITY |
 +--------------------------------------+---------------------------+--------------+-----------------+------------------+-------+---------+---------------+----------+
@@ -520,7 +518,7 @@ For the purposes of this guide, we'll increase it to 1 so we have a runner creat
 First, list current runners:
 
 ```bash
-gabriel@rossak:~$ garm-cli runner ls
+gabriel@rossak:~$ garm-cli runner ls -a
 +----+------+--------+---------------+---------+
 | NR | NAME | STATUS | RUNNER STATUS | POOL ID |
 +----+------+--------+---------------+---------+
@@ -557,7 +555,7 @@ gabriel@rossak:~$ garm-cli pool update 344e4a72-2035-4a18-a3d5-87bd3874b56c --mi
 Now if we list the runners:
 
 ```bash
-gabriel@rossak:~$ garm-cli runner ls
+gabriel@rossak:~$ garm-cli runner ls -a
 +----+-------------------+----------------+---------------+--------------------------------------+
 | NR | NAME | STATUS | RUNNER STATUS | POOL ID |
 +----+-------------------+----------------+---------------+--------------------------------------+
diff --git a/doc/scalesets.md b/doc/scalesets.md
deleted file mode 100644
index de9d348e..00000000
--- a/doc/scalesets.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# Scale Sets
-
-
-
-- [Scale Sets](#scale-sets)
-  - [Create a new scale set](#create-a-new-scale-set)
-  - [Scale Set vs Pool](#scale-set-vs-pool)
-
-
-
-GARM supports [scale sets](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners-with-actions-runner-controller/deploying-runner-scale-sets-with-actions-runner-controller). This new mode of operation was added by GitHub to enable more efficient scheduling of runners using their own ARC (Actions Runner Controller) project. The APIs for enabling scale sets are not yet public and the scale set functionality itself is not terribly well documented outside the context of ARC, but it can be implemented in third party auto scalers.
-
-In this document we will focus on how scale sets work, how they differ from pools and how to manage them.
-
-We'll start with detailing how to create a scale set.
-
-## Create a new scale set
-
-Creating a scale set is identical to [creating a pool](/doc/using_garm.md#creating-a-runner-pool), but instead of adding labels to a scale set, it takes a name. We'll assume you already have a provider enabled and you have added a repo, org or enterprise to GARM.
- -```bash -ubuntu@garm:~$ garm-cli repo ls -+--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ -| ID | OWNER | NAME | ENDPOINT | CREDENTIALS NAME | POOL BALANCER TYPE | POOL MGR RUNNING | -+--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ -| 84a5e82f-7ab1-427f-8ee0-4569b922296c | gsamfira | garm-testing | github.com | gabriel-samfira | roundrobin | true | -+--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ -``` - -List providers: - -```bash -ubuntu@garm:~$ garm-cli provider list -+--------------+---------------------------------+----------+ -| NAME | DESCRIPTION | TYPE | -+--------------+---------------------------------+----------+ -| incus | Incus external provider | external | -+--------------+---------------------------------+----------+ -| azure | azure provider | external | -+--------------+---------------------------------+----------+ -| aws_ec2 | Amazon EC2 provider | external | -+--------------+---------------------------------+----------+ -``` - -Create a new scale set: - -```bash -garm-cli scaleset add \ - --repo 84a5e82f-7ab1-427f-8ee0-4569b922296c \ - --provider-name incus \ - --image ubuntu:22.04 \ - --name garm-scale-set \ - --flavor default \ - --enabled true \ - --min-idle-runners=0 \ - --max-runners=20 -+--------------------------+-----------------------+ -| FIELD | VALUE | -+--------------------------+-----------------------+ -| ID | 8 | -| Scale Set ID | 14 | -| Scale Name | garm-scale-set | -| Provider Name | incus | -| Image | ubuntu:22.04 | -| Flavor | default | -| OS Type | linux | -| OS Architecture | amd64 | -| Max Runners | 20 | -| Min Idle Runners | 0 | -| Runner Bootstrap Timeout | 20 | -| Belongs to | gsamfira/garm-testing | -| Level | repo | -| Enabled | true | -| Runner Prefix | garm | -| Extra specs | | -| GitHub Runner Group | Default | -+--------------------------+-----------------------+ -``` - -That's it. You now have a scale set created, ready to accept jobs. - -## Scale Set vs Pool - -Scale sets are a new way of managing runners. They were introduced by GitHub to enable more efficient scheduling of runners. Scale sets are meant to reduce API calls, improve reliability of message deliveries to the auto scaler and improve efficiency of runner scheduling. While webhooks work great most of the time, under heavy load, they may not fire or they may fire while the auto scaler is offline, leading to lost messages. If webhooks are fired while GARM is down, we will never know about those jobs unless we query the current workflow runs. - -Listing workflow runs is not feasible for orgs or enterprises, as that would mean listing all repos within an org then for each repository, listing all workflow runs. This gets worse for enterprises. Scale sets on the other hand allow GARM to subscribe to a message queue and get messages just for that scale set over HTTP long poll. - -Advantages of scale sets over pools: - -* No more need to install a webhook, reducing your security footprint. -* Scheduling is done by GitHub. GARM receives runner requests from GitHub and GARM can choose to acquire those jobs or leave them for some other scaler. -* Easier use of runner groups. 
While GARM supports runner groups, GitHub currently [does not send the group name](https://github.com/orgs/community/discussions/158000) as part of webhooks in `queued` state. This prevents GARM (or any other auto scaler) from efficiently scheduling runners to pools that have runner groups set. But given that in the case of scale sets, GitHub schedules the runners to the scale set itself, we can efficiently create runners in certain runner groups.
-* scale set names must be unique within a runner group
diff --git a/doc/using_garm.md b/doc/using_garm.md
index e7758410..e5e093d3 100644
--- a/doc/using_garm.md
+++ b/doc/using_garm.md
@@ -567,10 +567,10 @@ ubuntu@garm:~$ garm-cli pool list --repo=be3a0673-56af-4395-9ebf-4521fea67567
 
 If you want to list pools for an organization or enterprise, you can use the `--org` or `--enterprise` options respectively.
 
-In the absence of the `--repo`, `--org` or `--enterprise` options, the command will list all pools in GARM, regardless of the entity they belong to.
+You can also list **all** pools from all configured GitHub entities by using the `--all` option.
 
 ```bash
-ubuntu@garm:~/garm$ garm-cli pool list
+ubuntu@garm:~/garm$ garm-cli pool list --all
 +--------------------------------------+---------------------------+--------------+-----------------------------------------+------------------+-------+---------+---------------+----------+
 | ID | IMAGE | FLAVOR | TAGS | BELONGS TO | LEVEL | ENABLED | RUNNER PREFIX | PRIORITY |
 +--------------------------------------+---------------------------+--------------+-----------------------------------------+------------------+-------+---------+---------------+----------+
@@ -705,7 +705,7 @@ Awesome! This runner will be able to pick up jobs that match the labels we've se
 
 You can list runners for a pool, for a repository, organization or enterprise, or for all of them.
To list all runners, you can run: ```bash -ubuntu@garm:~$ garm-cli runner list +ubuntu@garm:~$ garm-cli runner list --all +----+---------------------+---------+---------------+--------------------------------------+ | NR | NAME | STATUS | RUNNER STATUS | POOL ID | +----+---------------------+---------+---------------+--------------------------------------+ diff --git a/go.mod b/go.mod index 1ef71c9d..b60697c7 100644 --- a/go.mod +++ b/go.mod @@ -1,30 +1,34 @@ module github.com/cloudbase/garm -go 1.24.6 +go 1.23.0 + +toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 - github.com/cloudbase/garm-provider-common v0.1.7 + github.com/cloudbase/garm-provider-common v0.1.6 github.com/felixge/httpsnoop v1.0.4 - github.com/go-openapi/errors v0.22.2 + github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 - github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/golang-jwt/jwt/v5 v5.2.3 github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 github.com/gorilla/mux v1.8.1 github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413 - github.com/jedib0t/go-pretty/v6 v6.6.8 + github.com/jedib0t/go-pretty/v6 v6.6.7 + github.com/juju/clock v1.1.1 + github.com/juju/retry v1.0.1 github.com/manifoldco/promptui v0.9.0 github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 - github.com/prometheus/client_golang v1.23.0 + github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 - github.com/stretchr/testify v1.11.0 - golang.org/x/crypto v0.41.0 - golang.org/x/mod v0.27.0 + github.com/stretchr/testify v1.10.0 + golang.org/x/crypto v0.40.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.16.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 @@ -32,7 +36,7 @@ require ( gorm.io/datatypes v1.2.6 gorm.io/driver/mysql v1.6.0 gorm.io/driver/sqlite v1.6.0 - gorm.io/gorm v1.30.1 + gorm.io/gorm v1.30.0 ) require ( @@ -45,7 +49,7 @@ require ( github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/analysis v0.23.0 // indirect - github.com/go-openapi/jsonpointer v0.21.2 // indirect + github.com/go-openapi/jsonpointer v0.21.1 // indirect github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect @@ -57,32 +61,34 @@ require ( github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/now v1.1.5 // indirect github.com/josharian/intern v1.0.0 // indirect + github.com/juju/errors v1.0.0 // indirect + github.com/juju/loggo v1.0.0 // indirect + github.com/juju/testing v1.0.2 // indirect github.com/mailru/easyjson v0.9.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect - github.com/mattn/go-sqlite3 v1.14.31 // indirect + github.com/mattn/go-sqlite3 v1.14.28 // indirect github.com/minio/sio v0.4.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect - github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.65.0 // indirect - 
github.com/prometheus/procfs v0.16.1 // indirect + github.com/prometheus/procfs v0.17.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/spf13/pflag v1.0.7 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect go.mongodb.org/mongo-driver v1.17.4 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/otel v1.36.0 // indirect - go.opentelemetry.io/otel/metric v1.36.0 // indirect - go.opentelemetry.io/otel/trace v1.36.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect golang.org/x/net v0.42.0 // indirect - golang.org/x/sys v0.35.0 // indirect - golang.org/x/text v0.28.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index ef3ada85..aa948fc0 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.7 h1:V0upTejFRDiyFBO4hhkMWmPtmRTguyOt/4i1u9/rfbg= -github.com/cloudbase/garm-provider-common v0.1.7/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= +github.com/cloudbase/garm-provider-common v0.1.6 h1:wLqolRkUD2Z4rzuBLDs2exL1Aq+eJ5RBVnRvk5JP6fs= +github.com/cloudbase/garm-provider-common v0.1.6/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -34,10 +34,10 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= -github.com/go-openapi/errors v0.22.2 h1:rdxhzcBUazEcGccKqbY1Y7NS8FDcMyIRr0934jrYnZg= -github.com/go-openapi/errors v0.22.2/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= -github.com/go-openapi/jsonpointer v0.21.2 h1:AqQaNADVwq/VnkCmQg6ogE+M3FOsKTytwges0JdwVuA= -github.com/go-openapi/jsonpointer v0.21.2/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= +github.com/go-openapi/errors v0.22.1 h1:kslMRRnK7NCb/CvR1q1VWuEQCEIsBGn5GgKD9e+HYhU= +github.com/go-openapi/errors v0.22.1/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= +github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic= +github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= @@ -56,8 +56,8 @@ github.com/go-sql-driver/mysql v1.9.3 
h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1 github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= -github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0= +github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= @@ -87,14 +87,27 @@ github.com/jackc/pgx/v5 v5.5.5 h1:amBjrZVmksIdNjxGW/IiIMzxMKZFelXbUoPNb+8sjQw= github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= -github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc= -github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= +github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo= +github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/juju/ansiterm v0.0.0-20180109212912-720a0952cc2a/go.mod h1:UJSiEoRfvx3hP73CvoARgeLjaIOjybY9vj8PUPPFGeU= +github.com/juju/clock v1.1.1 h1:NvgHG9DQmOpBevgt6gzkyimdWBooLXDy1cQn89qJzBI= +github.com/juju/clock v1.1.1/go.mod h1:HIBvJ8kiV/n7UHwKuCkdYL4l/MDECztHR2sAvWDxxf0= +github.com/juju/errors v1.0.0 h1:yiq7kjCLll1BiaRuNY53MGI0+EQ3rF6GB+wvboZDefM= +github.com/juju/errors v1.0.0/go.mod h1:B5x9thDqx0wIMH3+aLIMP9HjItInYWObRovoCFM5Qe8= +github.com/juju/loggo v1.0.0 h1:Y6ZMQOGR9Aj3BGkiWx7HBbIx6zNwNkxhVNOHU2i1bl0= +github.com/juju/loggo v1.0.0/go.mod h1:NIXFioti1SmKAlKNuUwbMenNdef59IF52+ZzuOmHYkg= +github.com/juju/retry v1.0.1 h1:EVwOPq273wO1o0BCU7Ay7XE/bNb+bTNYsCK6y+BboAk= +github.com/juju/retry v1.0.1/go.mod h1:SssN1eYeK3A2qjnFGTiVMbdzGJ2BfluaJblJXvuvgqA= +github.com/juju/testing v1.0.2 h1:OR90RqCd9CJONxXamZAjLknpZdtqDyxqW8IwCbgw3i4= +github.com/juju/testing v1.0.2/go.mod h1:h3Vd2rzB57KrdsBEy6R7bmSKPzP76BnNavt7i8PerwQ= +github.com/juju/utils/v3 v3.0.0 h1:Gg3n63mGPbBuoXCo+EPJuMi44hGZfloI8nlCIebHu2Q= +github.com/juju/utils/v3 v3.0.0/go.mod h1:8csUcj1VRkfjNIRzBFWzLFCMLwLqsRWvkmhfVAUwbC4= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/kr/pretty v0.3.1 
h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -103,16 +116,19 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lunixbochs/vtclean v0.0.0-20160125035106-4fbf7632a2c6/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= +github.com/mattn/go-colorable v0.0.6/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.0-20160806122752-66b8e73f3f5c/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-sqlite3 v1.14.31 h1:ldt6ghyPJsokUIlksH63gWZkG6qVGeEAu4zLeS4aVZM= -github.com/mattn/go-sqlite3 v1.14.31/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A= +github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/microsoft/go-mssqldb v1.7.2 h1:CHkFJiObW7ItKTJfHo1QX7QBBD1iV+mn1eOyRP3b/PA= github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA= github.com/minio/sio v0.4.1 h1:EMe3YBC1nf+sRQia65Rutxi+Z554XPV0dt8BIBA+a/0= @@ -132,14 +148,14 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= -github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= +github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= +github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= -github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= -github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0= +github.com/prometheus/procfs v0.17.0/go.mod 
h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -156,28 +172,24 @@ github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8= -github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 h1:xzABM9let0HLLqFypcxvLmlvEciCHL7+Lv+4vwZqecI= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569/go.mod h1:2Ly+NIftZN4de9zRmENdYbvPQeaVIYKWpLFStLFEBgI= go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= -go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= -go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= -go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= -go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= -go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= -golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= -golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= -golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= golang.org/x/net v0.42.0 
h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= @@ -187,20 +199,23 @@ golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= -golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 h1:FVCohIoYO7IJoDDVpV2pdq7SgrMH6wHnuTyrdrxJNoY= gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20160105164936-4f90aeace3a2/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/datatypes v1.2.6 h1:KafLdXvFUhzNeL2ncm03Gl3eTLONQfNKZ+wJ+9Y4Nck= @@ -213,5 +228,5 @@ gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= gorm.io/driver/sqlserver v1.6.0 h1:VZOBQVsVhkHU/NzNhRJKoANt5pZGQAS1Bwc6m6dgfnc= gorm.io/driver/sqlserver v1.6.0/go.mod h1:WQzt4IJo/WHKnckU9jXBLMJIVNMVeTu25dnOzehntWw= -gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4= -gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/internal/testing/mock_watcher.go b/internal/testing/mock_watcher.go index 112f0de5..67ae5da4 100644 --- a/internal/testing/mock_watcher.go +++ b/internal/testing/mock_watcher.go @@ -1,20 +1,6 @@ //go:build testing // +build 
testing -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package testing import ( diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 38725882..1b937b6c 100644 --- a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -19,13 +19,13 @@ package testing import ( "context" - "errors" "fmt" "os" "path/filepath" "sort" "testing" + "github.com/pkg/errors" "github.com/stretchr/testify/require" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -85,32 +85,7 @@ func CreateGARMTestUser(ctx context.Context, username string, db common.Store, s return user } -func CreateGHESEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { - endpointParams := params.CreateGithubEndpointParams{ - Name: "ghes.example.com", - Description: "GHES endpoint", - APIBaseURL: "https://ghes.example.com", - UploadBaseURL: "https://upload.ghes.example.com/", - BaseURL: "https://ghes.example.com", - } - - ep, err := db.GetGithubEndpoint(ctx, endpointParams.Name) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - s.Fatalf("failed to get database object (%s): %v", endpointParams.Name, err) - } - ep, err = db.CreateGithubEndpoint(ctx, endpointParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - s.Fatalf("failed to create database object (%s): %v", endpointParams.Name, err) - } - } - } - - return ep -} - -func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { +func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testing.T) params.GithubEndpoint { endpointParams := params.CreateGithubEndpointParams{ Name: "github.com", Description: "github endpoint", @@ -135,35 +110,11 @@ func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testin return ep } -func CreateDefaultGiteaEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { - endpointParams := params.CreateGiteaEndpointParams{ - Name: "gitea.example.com", - Description: "gitea endpoint", - APIBaseURL: "https://gitea.example.com/", - BaseURL: "https://gitea.example.com/", - } - - ep, err := db.GetGithubEndpoint(ctx, endpointParams.Name) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - s.Fatalf("failed to get database object (github.com): %v", err) - } - ep, err = db.CreateGiteaEndpoint(ctx, endpointParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - s.Fatalf("failed to create database object (github.com): %v", err) - } - } - } - - return ep -} - -func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.ForgeCredentials { +func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.GithubEndpoint) params.GithubCredentials { newCredsParams := params.CreateGithubCredentialsParams{ Name: credsName, 
Description: "Test creds", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, Endpoint: endpoint.Name, PAT: params.GithubPAT{ OAuth2Token: "test-token", @@ -176,23 +127,6 @@ func CreateTestGithubCredentials(ctx context.Context, credsName string, db commo return newCreds } -func CreateTestGiteaCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.ForgeCredentials { - newCredsParams := params.CreateGiteaCredentialsParams{ - Name: credsName, - Description: "Test creds", - AuthType: params.ForgeAuthTypePAT, - Endpoint: endpoint.Name, - PAT: params.GithubPAT{ - OAuth2Token: "test-token", - }, - } - newCreds, err := db.CreateGiteaCredentials(ctx, newCredsParams) - if err != nil { - s.Fatalf("failed to create database object (%s): %v", credsName, err) - } - return newCreds -} - func GetTestSqliteDBConfig(t *testing.T) config.Database { dir, err := os.MkdirTemp("", "garm-config-test") if err != nil { @@ -219,10 +153,6 @@ type NameAndIDDBEntity interface { GetName() string } -func Ptr[T any](v T) *T { - return &v -} - func EqualDBEntityByName[T NameAndIDDBEntity](t *testing.T, expected, actual []T) { require.Equal(t, len(expected), len(actual)) diff --git a/locking/interface.go b/locking/interface.go deleted file mode 100644 index 43ed1737..00000000 --- a/locking/interface.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package locking - -import "time" - -type Locker interface { - TryLock(key, identifier string) bool - Lock(key, identifier string) - LockedBy(key string) (string, bool) - Unlock(key string, remove bool) - Delete(key string) -} - -type InstanceDeleteBackoff interface { - ShouldProcess(key string) (bool, time.Time) - Delete(key string) - RecordFailure(key string) -} diff --git a/locking/local_backoff_locker_test.go b/locking/local_backoff_locker_test.go deleted file mode 100644 index 00fe09c8..00000000 --- a/locking/local_backoff_locker_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package locking - -import ( - "testing" - "time" - - "github.com/stretchr/testify/suite" -) - -type LockerBackoffTestSuite struct { - suite.Suite - - locker *instanceDeleteBackoff -} - -func (l *LockerBackoffTestSuite) SetupTest() { - l.locker = &instanceDeleteBackoff{} -} - -func (l *LockerBackoffTestSuite) TearDownTest() { - l.locker = nil -} - -func (l *LockerBackoffTestSuite) TestShouldProcess() { - shouldProcess, deadline := l.locker.ShouldProcess("test") - l.Require().True(shouldProcess) - l.Require().Equal(time.Time{}, deadline) - - l.locker.muxes.Store("test", &instanceBackOff{ - backoffSeconds: 0, - lastRecordedFailureTime: time.Time{}, - }) - - shouldProcess, deadline = l.locker.ShouldProcess("test") - l.Require().True(shouldProcess) - l.Require().Equal(time.Time{}, deadline) - - l.locker.muxes.Store("test", &instanceBackOff{ - backoffSeconds: 100, - lastRecordedFailureTime: time.Now().UTC(), - }) - - shouldProcess, deadline = l.locker.ShouldProcess("test") - l.Require().False(shouldProcess) - l.Require().NotEqual(time.Time{}, deadline) -} - -func (l *LockerBackoffTestSuite) TestRecordFailure() { - l.locker.RecordFailure("test") - - mux, ok := l.locker.muxes.Load("test") - l.Require().True(ok) - ib := mux.(*instanceBackOff) - l.Require().NotNil(ib) - l.Require().NotEqual(time.Time{}, ib.lastRecordedFailureTime) - l.Require().Equal(float64(5), ib.backoffSeconds) - - l.locker.RecordFailure("test") - mux, ok = l.locker.muxes.Load("test") - l.Require().True(ok) - ib = mux.(*instanceBackOff) - l.Require().NotNil(ib) - l.Require().NotEqual(time.Time{}, ib.lastRecordedFailureTime) - l.Require().Equal(7.5, ib.backoffSeconds) - - l.locker.Delete("test") - mux, ok = l.locker.muxes.Load("test") - l.Require().False(ok) - l.Require().Nil(mux) -} - -func TestBackoffTestSuite(t *testing.T) { - t.Parallel() - suite.Run(t, new(LockerBackoffTestSuite)) -} diff --git a/locking/local_locker.go b/locking/local_locker.go deleted file mode 100644 index 312d85ec..00000000 --- a/locking/local_locker.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package locking - -import ( - "context" - "sync" - - dbCommon "github.com/cloudbase/garm/database/common" -) - -const ( - maxBackoffSeconds float64 = 1200 // 20 minutes -) - -func NewLocalLocker(_ context.Context, _ dbCommon.Store) (Locker, error) { - return &keyMutex{}, nil -} - -type keyMutex struct { - muxes sync.Map -} - -type lockWithIdent struct { - mux sync.Mutex - ident string -} - -var _ Locker = &keyMutex{} - -func (k *keyMutex) TryLock(key, identifier string) bool { - mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ - mux: sync.Mutex{}, - }) - keyMux := mux.(*lockWithIdent) - locked := keyMux.mux.TryLock() - if locked { - keyMux.ident = identifier - } - return locked -} - -func (k *keyMutex) Lock(key, identifier string) { - mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ - mux: sync.Mutex{}, - }) - keyMux := mux.(*lockWithIdent) - keyMux.ident = identifier - keyMux.mux.Lock() -} - -func (k *keyMutex) Unlock(key string, remove bool) { - mux, ok := k.muxes.Load(key) - if !ok { - return - } - keyMux := mux.(*lockWithIdent) - if remove { - k.Delete(key) - } - keyMux.ident = "" - keyMux.mux.Unlock() -} - -func (k *keyMutex) Delete(key string) { - k.muxes.Delete(key) -} - -func (k *keyMutex) LockedBy(key string) (string, bool) { - mux, ok := k.muxes.Load(key) - if !ok { - return "", false - } - keyMux := mux.(*lockWithIdent) - if keyMux.ident == "" { - return "", false - } - - return keyMux.ident, true -} diff --git a/locking/local_locker_test.go b/locking/local_locker_test.go deleted file mode 100644 index 75b4dac0..00000000 --- a/locking/local_locker_test.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
-package locking - -import ( - "testing" - - "github.com/stretchr/testify/suite" -) - -type LockerTestSuite struct { - suite.Suite - - mux *keyMutex -} - -func (l *LockerTestSuite) SetupTest() { - l.mux = &keyMutex{} - err := RegisterLocker(l.mux) - l.Require().NoError(err, "should register the locker") -} - -func (l *LockerTestSuite) TearDownTest() { - l.mux = nil - locker = nil -} - -func (l *LockerTestSuite) TestLocalLockerLockUnlock() { - l.mux.Lock("test", "test-identifier") - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - l.mux.Unlock("test", true) - mux, ok = l.mux.muxes.Load("test") - l.Require().False(ok) - l.Require().Nil(mux) - l.mux.Unlock("test", false) -} - -func (l *LockerTestSuite) TestLocalLockerTryLock() { - locked := l.mux.TryLock("test", "test-identifier") - l.Require().True(locked) - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - locked = l.mux.TryLock("test", "another-identifier2") - l.Require().False(locked) - mux, ok = l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux = mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - l.mux.Unlock("test", true) - locked = l.mux.TryLock("test", "another-identifier2") - l.Require().True(locked) - mux, ok = l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux = mux.(*lockWithIdent) - l.Require().Equal("another-identifier2", keyMux.ident) - l.mux.Unlock("test", true) -} - -func (l *LockerTestSuite) TestLocalLockertLockedBy() { - l.mux.Lock("test", "test-identifier") - identifier, ok := l.mux.LockedBy("test") - l.Require().True(ok) - l.Require().Equal("test-identifier", identifier) - l.mux.Unlock("test", true) - identifier, ok = l.mux.LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) - - l.mux.Lock("test", "test-identifier") - identifier, ok = l.mux.LockedBy("test") - l.Require().True(ok) - l.Require().Equal("test-identifier", identifier) - l.mux.Unlock("test", false) - identifier, ok = l.mux.LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) -} - -func (l *LockerTestSuite) TestLockerPanicsIfNotInitialized() { - locker = nil - l.Require().Panics( - func() { - Lock("test", "test-identifier") - }, - "Lock should panic if locker is not initialized", - ) - - l.Require().Panics( - func() { - TryLock("test", "test-identifier") - }, - "TryLock should panic if locker is not initialized", - ) - - l.Require().Panics( - func() { - Unlock("test", false) - }, - "Unlock should panic if locker is not initialized", - ) - - l.Require().Panics( - func() { - Delete("test") - }, - "Delete should panic if locker is not initialized", - ) - - l.Require().Panics( - func() { - LockedBy("test") - }, - "LockedBy should panic if locker is not initialized", - ) -} - -func (l *LockerTestSuite) TestLockerAlreadyRegistered() { - err := RegisterLocker(l.mux) - l.Require().Error(err, "should not be able to register the same locker again") - l.Require().Equal("locker already registered", err.Error()) -} - -func (l *LockerTestSuite) TestLockerDelete() { - Lock("test", "test-identifier") - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - Delete("test") - mux, ok = l.mux.muxes.Load("test") - l.Require().False(ok) - l.Require().Nil(mux) - - identifier, ok := l.mux.LockedBy("test") - 
l.Require().False(ok) - l.Require().Equal("", identifier) -} - -func (l *LockerTestSuite) TestLockUnlock() { - Lock("test", "test-identifier") - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - Unlock("test", true) - mux, ok = l.mux.muxes.Load("test") - l.Require().False(ok) - l.Require().Nil(mux) - - identifier, ok := l.mux.LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) -} - -func (l *LockerTestSuite) TestLockUnlockWithoutRemove() { - Lock("test", "test-identifier") - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - Unlock("test", false) - mux, ok = l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux = mux.(*lockWithIdent) - l.Require().Equal("", keyMux.ident) - - identifier, ok := l.mux.LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) -} - -func (l *LockerTestSuite) TestTryLock() { - locked := TryLock("test", "test-identifier") - l.Require().True(locked) - mux, ok := l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux := mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - locked = TryLock("test", "another-identifier2") - l.Require().False(locked) - mux, ok = l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux = mux.(*lockWithIdent) - l.Require().Equal("test-identifier", keyMux.ident) - - Unlock("test", true) - locked = TryLock("test", "another-identifier2") - l.Require().True(locked) - mux, ok = l.mux.muxes.Load("test") - l.Require().True(ok) - keyMux = mux.(*lockWithIdent) - l.Require().Equal("another-identifier2", keyMux.ident) - Unlock("test", true) -} - -func (l *LockerTestSuite) TestLockedBy() { - Lock("test", "test-identifier") - identifier, ok := LockedBy("test") - l.Require().True(ok) - l.Require().Equal("test-identifier", identifier) - Unlock("test", true) - identifier, ok = LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) - - Lock("test", "test-identifier2") - identifier, ok = LockedBy("test") - l.Require().True(ok) - l.Require().Equal("test-identifier2", identifier) - Unlock("test", false) - identifier, ok = LockedBy("test") - l.Require().False(ok) - l.Require().Equal("", identifier) -} - -func TestLockerTestSuite(t *testing.T) { - t.Parallel() - suite.Run(t, new(LockerTestSuite)) -} diff --git a/locking/locking.go b/locking/locking.go deleted file mode 100644 index 312d2e6a..00000000 --- a/locking/locking.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package locking - -import ( - "fmt" - "log/slog" - "runtime" - "sync" -) - -var locker Locker - -var lockerMux = sync.Mutex{} - -func TryLock(key, identifier string) (ok bool) { - if locker == nil { - panic("no locker is registered") - } - - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to try lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - defer slog.Debug("try lock returned", "key", key, "identifier", identifier, "locked", ok, "caller", fmt.Sprintf("%s:%d", filename, line)) - - ok = locker.TryLock(key, identifier) - return ok -} - -func Lock(key, identifier string) { - if locker == nil { - panic("no locker is registered") - } - - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - defer slog.Debug("lock acquired", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - - locker.Lock(key, identifier) -} - -func Unlock(key string, remove bool) { - if locker == nil { - panic("no locker is registered") - } - - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to unlock", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) - defer slog.Debug("unlock completed", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) - locker.Unlock(key, remove) -} - -func LockedBy(key string) (string, bool) { - if locker == nil { - panic("no locker is registered") - } - - return locker.LockedBy(key) -} - -func Delete(key string) { - if locker == nil { - panic("no locker is registered") - } - - locker.Delete(key) -} - -func RegisterLocker(lock Locker) error { - lockerMux.Lock() - defer lockerMux.Unlock() - - if locker != nil { - return fmt.Errorf("locker already registered") - } - - locker = lock - return nil -} diff --git a/metrics/enterprise.go b/metrics/enterprise.go index 882b64df..f8382edf 100644 --- a/metrics/enterprise.go +++ b/metrics/enterprise.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/github.go b/metrics/github.go index 0d6f5fa7..0c050652 100644 --- a/metrics/github.go +++ b/metrics/github.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package metrics import "github.com/prometheus/client_golang/prometheus" diff --git a/metrics/health.go b/metrics/health.go index 13194231..4acfbb36 100644 --- a/metrics/health.go +++ b/metrics/health.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/instance.go b/metrics/instance.go index b9d7e1cf..7c2f2f96 100644 --- a/metrics/instance.go +++ b/metrics/instance.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/metrics.go b/metrics/metrics.go index 1a566116..edceb30a 100644 --- a/metrics/metrics.go +++ b/metrics/metrics.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/organization.go b/metrics/organization.go index d04e7a4e..38d7c611 100644 --- a/metrics/organization.go +++ b/metrics/organization.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package metrics import ( diff --git a/metrics/pool.go b/metrics/pool.go index fc6f2520..5803af90 100644 --- a/metrics/pool.go +++ b/metrics/pool.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/provider.go b/metrics/provider.go index 3262ab3b..8285ca1e 100644 --- a/metrics/provider.go +++ b/metrics/provider.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/repository.go b/metrics/repository.go index 21714233..a84dd120 100644 --- a/metrics/repository.go +++ b/metrics/repository.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/metrics/util.go b/metrics/util.go index d83b4973..b2edb580 100644 --- a/metrics/util.go +++ b/metrics/util.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics func Bool2float64(b bool) float64 { diff --git a/metrics/webhooks.go b/metrics/webhooks.go index 48a08f9c..839219a4 100644 --- a/metrics/webhooks.go +++ b/metrics/webhooks.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import "github.com/prometheus/client_golang/prometheus" diff --git a/params/github.go b/params/github.go index 08f7b409..cb9cecf0 100644 --- a/params/github.go +++ b/params/github.go @@ -14,16 +14,7 @@ package params -import ( - "encoding/base64" - "encoding/json" - "fmt" - "net/url" - "time" - - jwt "github.com/golang-jwt/jwt/v5" - "github.com/google/uuid" -) +import "time" type Event string @@ -171,9 +162,7 @@ type WorkflowJob struct { DefaultBranch string `json:"default_branch"` } `json:"repository"` Organization struct { - Login string `json:"login"` - // Name is a gitea specific field - Name string `json:"name"` + Login string `json:"login"` ID int64 `json:"id"` NodeID string `json:"node_id"` URL string `json:"url"` @@ -219,351 +208,3 @@ type WorkflowJob struct { SiteAdmin bool `json:"site_admin"` } `json:"sender"` } - -func (w WorkflowJob) GetOrgName(forgeType EndpointType) string { - if forgeType == GiteaEndpointType { - return w.Organization.Name - } - return w.Organization.Login -} - -type RunnerSetting struct { - Ephemeral bool `json:"ephemeral,omitempty"` - IsElastic bool `json:"isElastic,omitempty"` - DisableUpdate bool `json:"disableUpdate,omitempty"` -} - -type Label struct { - Type string `json:"type"` - Name string `json:"name"` -} - -type RunnerScaleSetStatistic struct { - TotalAvailableJobs int `json:"totalAvailableJobs"` - TotalAcquiredJobs int `json:"totalAcquiredJobs"` - TotalAssignedJobs int `json:"totalAssignedJobs"` - TotalRunningJobs int `json:"totalRunningJobs"` - TotalRegisteredRunners int `json:"totalRegisteredRunners"` - TotalBusyRunners int `json:"totalBusyRunners"` - TotalIdleRunners int `json:"totalIdleRunners"` -} - -type RunnerScaleSet struct { - ID int `json:"id,omitempty"` - Name string `json:"name,omitempty"` - RunnerGroupID int64 `json:"runnerGroupId,omitempty"` - RunnerGroupName string `json:"runnerGroupName,omitempty"` - Labels []Label `json:"labels,omitempty"` - RunnerSetting RunnerSetting `json:"RunnerSetting,omitempty"` - CreatedOn time.Time `json:"createdOn,omitempty"` - RunnerJitConfigURL string `json:"runnerJitConfigUrl,omitempty"` - GetAcquirableJobsURL string `json:"getAcquirableJobsUrl,omitempty"` - AcquireJobsURL string `json:"acquireJobsUrl,omitempty"` - Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` - Status interface{} `json:"status,omitempty"` - Enabled *bool `json:"enabled,omitempty"` -} - -type RunnerScaleSetsResponse struct { - Count int `json:"count"` - RunnerScaleSets []RunnerScaleSet `json:"value"` -} - -type ActionsServiceAdminInfoResponse struct { - URL string `json:"url,omitempty"` - Token string `json:"token,omitempty"` -} - -func (a ActionsServiceAdminInfoResponse) GetURL() (*url.URL, error) { - if a.URL == "" { - return nil, fmt.Errorf("no url specified") - } - u, err := url.ParseRequestURI(a.URL) - if err != nil { - return nil, fmt.Errorf("failed to parse URL: %w", err) - } - return u, nil -} - -func (a ActionsServiceAdminInfoResponse) getJWT() (*jwt.Token, error) { - // We're parsing a token we got from the GitHub API. We can't verify its signature. 
- // We do need the expiration date however, or other info. - token, _, err := jwt.NewParser().ParseUnverified(a.Token, &jwt.RegisteredClaims{}) - if err != nil { - return nil, fmt.Errorf("failed to parse jwt token: %w", err) - } - return token, nil -} - -func (a ActionsServiceAdminInfoResponse) ExiresAt() (time.Time, error) { - jwt, err := a.getJWT() - if err != nil { - return time.Time{}, fmt.Errorf("failed to decode jwt token: %w", err) - } - expiration, err := jwt.Claims.GetExpirationTime() - if err != nil { - return time.Time{}, fmt.Errorf("failed to get expiration time: %w", err) - } - - return expiration.Time, nil -} - -func (a ActionsServiceAdminInfoResponse) IsExpired() bool { - if exp, err := a.ExiresAt(); err == nil { - return time.Now().UTC().After(exp) - } - return true -} - -func (a ActionsServiceAdminInfoResponse) TimeRemaining() (time.Duration, error) { - exp, err := a.ExiresAt() - if err != nil { - return 0, fmt.Errorf("failed to get expiration: %w", err) - } - now := time.Now().UTC() - return exp.Sub(now), nil -} - -func (a ActionsServiceAdminInfoResponse) ExpiresIn(t time.Duration) bool { - remaining, err := a.TimeRemaining() - if err != nil { - return true - } - return remaining <= t -} - -type ActionsServiceAdminInfoRequest struct { - URL string `json:"url,omitempty"` - RunnerEvent string `json:"runner_event,omitempty"` -} - -type RunnerScaleSetSession struct { - SessionID *uuid.UUID `json:"sessionId,omitempty"` - OwnerName string `json:"ownerName,omitempty"` - RunnerScaleSet *RunnerScaleSet `json:"runnerScaleSet,omitempty"` - MessageQueueURL string `json:"messageQueueUrl,omitempty"` - MessageQueueAccessToken string `json:"messageQueueAccessToken,omitempty"` - Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` -} - -func (a RunnerScaleSetSession) GetURL() (*url.URL, error) { - if a.MessageQueueURL == "" { - return nil, fmt.Errorf("no url specified") - } - u, err := url.ParseRequestURI(a.MessageQueueURL) - if err != nil { - return nil, fmt.Errorf("failed to parse URL: %w", err) - } - return u, nil -} - -func (a RunnerScaleSetSession) getJWT() (*jwt.Token, error) { - // We're parsing a token we got from the GitHub API. We can't verify its signature. - // We do need the expiration date however, or other info. 
- token, _, err := jwt.NewParser().ParseUnverified(a.MessageQueueAccessToken, &jwt.RegisteredClaims{}) - if err != nil { - return nil, fmt.Errorf("failed to parse jwt token: %w", err) - } - return token, nil -} - -func (a RunnerScaleSetSession) ExiresAt() (time.Time, error) { - jwt, err := a.getJWT() - if err != nil { - return time.Time{}, fmt.Errorf("failed to decode jwt token: %w", err) - } - expiration, err := jwt.Claims.GetExpirationTime() - if err != nil { - return time.Time{}, fmt.Errorf("failed to get expiration time: %w", err) - } - - return expiration.Time, nil -} - -func (a RunnerScaleSetSession) IsExpired() bool { - if exp, err := a.ExiresAt(); err == nil { - return time.Now().UTC().After(exp) - } - return true -} - -func (a RunnerScaleSetSession) TimeRemaining() (time.Duration, error) { - exp, err := a.ExiresAt() - if err != nil { - return 0, fmt.Errorf("failed to get expiration: %w", err) - } - now := time.Now().UTC() - return exp.Sub(now), nil -} - -func (a RunnerScaleSetSession) ExpiresIn(t time.Duration) bool { - remaining, err := a.TimeRemaining() - if err != nil { - return true - } - return remaining <= t -} - -type RunnerScaleSetMessage struct { - MessageID int64 `json:"messageId"` - MessageType string `json:"messageType"` - Body string `json:"body"` - Statistics *RunnerScaleSetStatistic `json:"statistics"` -} - -func (r RunnerScaleSetMessage) IsNil() bool { - return r.MessageID == 0 && r.MessageType == "" && r.Body == "" && r.Statistics == nil -} - -func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { - var body []ScaleSetJobMessage - if r.Body == "" { - return nil, fmt.Errorf("no body specified") - } - if err := json.Unmarshal([]byte(r.Body), &body); err != nil { - return nil, fmt.Errorf("failed to unmarshal body: %w", err) - } - return body, nil -} - -type RunnerReference struct { - ID int64 `json:"id"` - Name string `json:"name"` - OS string `json:"os"` - RunnerScaleSetID int `json:"runnerScaleSetId"` - CreatedOn any `json:"createdOn"` - RunnerGroupID uint64 `json:"runnerGroupId"` - RunnerGroupName string `json:"runnerGroupName"` - Version string `json:"version"` - Enabled bool `json:"enabled"` - Ephemeral bool `json:"ephemeral"` - Status any `json:"status"` - DisableUpdate bool `json:"disableUpdate"` - ProvisioningState string `json:"provisioningState"` - Busy bool `json:"busy"` - Labels []Label `json:"labels,omitempty"` -} - -func (r RunnerReference) GetStatus() RunnerStatus { - status, ok := r.Status.(string) - if !ok { - return RunnerUnknown - } - runnerStatus := RunnerStatus(status) - if !runnerStatus.IsValid() { - return RunnerUnknown - } - - if runnerStatus == RunnerOnline { - if r.Busy { - return RunnerActive - } - return RunnerIdle - } - return runnerStatus -} - -type RunnerScaleSetJitRunnerConfig struct { - Runner *RunnerReference `json:"runner"` - EncodedJITConfig string `json:"encodedJITConfig"` -} - -func (r RunnerScaleSetJitRunnerConfig) DecodedJITConfig() (map[string]string, error) { - if r.EncodedJITConfig == "" { - return nil, fmt.Errorf("no encoded JIT config specified") - } - decoded, err := base64.StdEncoding.DecodeString(r.EncodedJITConfig) - if err != nil { - return nil, fmt.Errorf("failed to decode JIT config: %w", err) - } - jitConfig := make(map[string]string) - if err := json.Unmarshal(decoded, &jitConfig); err != nil { - return nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) - } - return jitConfig, nil -} - -type RunnerReferenceList struct { - Count int `json:"count"` - RunnerReferences 
[]RunnerReference `json:"value"` -} - -type AcquirableJobList struct { - Count int `json:"count"` - Jobs []AcquirableJob `json:"value"` -} - -type AcquirableJob struct { - AcquireJobURL string `json:"acquireJobUrl"` - MessageType string `json:"messageType"` - RunnerRequestID int64 `json:"runnerRequestId"` - RepositoryName string `json:"repositoryName"` - OwnerName string `json:"ownerName"` - JobWorkflowRef string `json:"jobWorkflowRef"` - EventName string `json:"eventName"` - RequestLabels []string `json:"requestLabels"` -} - -type RunnerGroup struct { - ID int64 `json:"id"` - Name string `json:"name"` - Size int64 `json:"size"` - IsDefault bool `json:"isDefaultGroup"` -} - -type RunnerGroupList struct { - Count int `json:"count"` - RunnerGroups []RunnerGroup `json:"value"` -} - -type ScaleSetJobMessage struct { - MessageType string `json:"messageType,omitempty"` - JobID string `json:"jobId,omitempty"` - RunnerRequestID int64 `json:"runnerRequestId,omitempty"` - RepositoryName string `json:"repositoryName,omitempty"` - OwnerName string `json:"ownerName,omitempty"` - JobWorkflowRef string `json:"jobWorkflowRef,omitempty"` - JobDisplayName string `json:"jobDisplayName,omitempty"` - WorkflowRunID int64 `json:"workflowRunId,omitempty"` - EventName string `json:"eventName,omitempty"` - RequestLabels []string `json:"requestLabels,omitempty"` - QueueTime time.Time `json:"queueTime,omitempty"` - ScaleSetAssignTime time.Time `json:"scaleSetAssignTime,omitempty"` - RunnerAssignTime time.Time `json:"runnerAssignTime,omitempty"` - FinishTime time.Time `json:"finishTime,omitempty"` - Result string `json:"result,omitempty"` - RunnerID int64 `json:"runnerId,omitempty"` - RunnerName string `json:"runnerName,omitempty"` - AcquireJobURL string `json:"acquireJobUrl,omitempty"` -} - -func (s ScaleSetJobMessage) MessageTypeToStatus() JobStatus { - switch s.MessageType { - case MessageTypeJobAssigned: - return JobStatusQueued - case MessageTypeJobStarted: - return JobStatusInProgress - case MessageTypeJobCompleted: - return JobStatusCompleted - default: - return JobStatusQueued - } -} - -func (s ScaleSetJobMessage) ToJob() Job { - return Job{ - ScaleSetJobID: s.JobID, - Action: s.EventName, - RunID: s.WorkflowRunID, - Status: string(s.MessageTypeToStatus()), - Conclusion: s.Result, - CompletedAt: s.FinishTime, - StartedAt: s.RunnerAssignTime, - Name: s.JobDisplayName, - GithubRunnerID: s.RunnerID, - RunnerName: s.RunnerName, - RepositoryName: s.RepositoryName, - RepositoryOwner: s.OwnerName, - Labels: s.RequestLabels, - } -} diff --git a/params/interfaces.go b/params/interfaces.go deleted file mode 100644 index 31ef635f..00000000 --- a/params/interfaces.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package params - -import "time" - -// EntityGetter is implemented by all github entities (repositories, organizations and enterprises). -// It defines the GetEntity() function which returns a github entity.
-type EntityGetter interface { - GetEntity() (ForgeEntity, error) -} - -type IDGetter interface { - GetID() uint -} - -type CreationDateGetter interface { - GetCreatedAt() time.Time -} - -type ForgeCredentialsGetter interface { - GetForgeCredentials() ForgeCredentials -} diff --git a/params/params.go b/params/params.go index 1acd95e1..ed9403b2 100644 --- a/params/params.go +++ b/params/params.go @@ -22,7 +22,6 @@ import ( "encoding/json" "encoding/pem" "fmt" - "math" "net" "net/http" "time" @@ -37,32 +36,17 @@ import ( ) type ( - ForgeEntityType string + GithubEntityType string EventType string EventLevel string ProviderType string JobStatus string RunnerStatus string WebhookEndpointType string - ForgeAuthType string - EndpointType string + GithubAuthType string PoolBalancerType string - ScaleSetState string - ScaleSetMessageType string ) -func (s RunnerStatus) IsValid() bool { - switch s { - case RunnerIdle, RunnerPending, RunnerTerminated, - RunnerInstalling, RunnerFailed, - RunnerActive, RunnerOffline, - RunnerUnknown, RunnerOnline: - - return true - } - return false -} - const ( // PoolBalancerTypeRoundRobin will try to cycle through the pools of an entity // in a round robin fashion. For example, if a repository has multiple pools that @@ -78,12 +62,6 @@ const ( PoolBalancerTypeNone PoolBalancerType = "" ) -const ( - AutoEndpointType EndpointType = "" - GithubEndpointType EndpointType = "github" - GiteaEndpointType EndpointType = "gitea" -) - const ( // LXDProvider represents the LXD provider. LXDProvider ProviderType = "lxd" @@ -108,9 +86,9 @@ const ( ) const ( - ForgeEntityTypeRepository ForgeEntityType = "repository" - ForgeEntityTypeOrganization ForgeEntityType = "organization" - ForgeEntityTypeEnterprise ForgeEntityType = "enterprise" + GithubEntityTypeRepository GithubEntityType = "repository" + GithubEntityTypeOrganization GithubEntityType = "organization" + GithubEntityTypeEnterprise GithubEntityType = "enterprise" ) const ( @@ -137,42 +115,19 @@ const ( RunnerInstalling RunnerStatus = "installing" RunnerFailed RunnerStatus = "failed" RunnerActive RunnerStatus = "active" - RunnerOffline RunnerStatus = "offline" - RunnerOnline RunnerStatus = "online" - RunnerUnknown RunnerStatus = "unknown" ) const ( - // ForgeAuthTypePAT is the OAuth token based authentication - ForgeAuthTypePAT ForgeAuthType = "pat" - // ForgeAuthTypeApp is the GitHub App based authentication - ForgeAuthTypeApp ForgeAuthType = "app" + // GithubAuthTypePAT is the OAuth token based authentication + GithubAuthTypePAT GithubAuthType = "pat" + // GithubAuthTypeApp is the GitHub App based authentication + GithubAuthTypeApp GithubAuthType = "app" ) -func (e ForgeEntityType) String() string { +func (e GithubEntityType) String() string { return string(e) } -const ( - ScaleSetPendingCreate ScaleSetState = "pending_create" - ScaleSetCreated ScaleSetState = "created" - ScaleSetError ScaleSetState = "error" - ScaleSetPendingDelete ScaleSetState = "pending_delete" - ScaleSetPendingForceDelete ScaleSetState = "pending_force_delete" -) - -const ( - MessageTypeRunnerScaleSetJobMessages ScaleSetMessageType = "RunnerScaleSetJobMessages" -) - -const ( - MessageTypeJobAssigned = "JobAssigned" - MessageTypeJobCompleted = "JobCompleted" - MessageTypeJobStarted = "JobStarted" - MessageTypeJobAvailable = "JobAvailable" -) - -// swagger:model StatusMessage type StatusMessage struct { CreatedAt time.Time `json:"created_at,omitempty"` Message string `json:"message,omitempty"` @@ -180,17 +135,6 @@ type StatusMessage struct { EventLevel 
EventLevel `json:"event_level,omitempty"` } -// swagger:model EntityEvent -type EntityEvent struct { - ID uint `json:"id,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - - EventType EventType `json:"event_type,omitempty"` - EventLevel EventLevel `json:"event_level,omitempty"` - Message string `json:"message,omitempty"` -} - -// swagger:model Instance type Instance struct { // ID is the database ID of this instance. ID string `json:"id,omitempty"` @@ -200,10 +144,6 @@ type Instance struct { // instance in the provider. ProviderID string `json:"provider_id,omitempty"` - // ProviderName is the name of the IaaS where the instance was - // created. - ProviderName string `json:"provider_name"` - // AgentID is the github runner agent ID. AgentID int64 `json:"agent_id,omitempty"` @@ -239,9 +179,6 @@ type Instance struct { // PoolID is the ID of the garm pool to which a runner belongs. PoolID string `json:"pool_id,omitempty"` - // ScaleSetID is the ID of the scale set to which a runner belongs. - ScaleSetID uint `json:"scale_set_id,omitempty"` - // ProviderFault holds any error messages captured from the IaaS provider that is // responsible for managing the lifecycle of the runner. ProviderFault []byte `json:"provider_fault,omitempty"` @@ -272,10 +209,6 @@ type Instance struct { JitConfiguration map[string]string `json:"-"` } -func (i Instance) GetCreatedAt() time.Time { - return i.CreatedAt -} - func (i Instance) GetName() string { return i.Name } @@ -285,7 +218,6 @@ func (i Instance) GetID() string { } // used by swagger client generated code -// swagger:model Instances type Instances []Instance type BootstrapInstance struct { @@ -356,36 +288,29 @@ type Tag struct { Name string `json:"name,omitempty"` } -// swagger:model Pool type Pool struct { RunnerPrefix - ID string `json:"id,omitempty"` - ProviderName string `json:"provider_name,omitempty"` - MaxRunners uint `json:"max_runners,omitempty"` - MinIdleRunners uint `json:"min_idle_runners,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType `json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - Tags []Tag `json:"tags,omitempty"` - Enabled bool `json:"enabled,omitempty"` - Instances []Instance `json:"instances,omitempty"` - - RepoID string `json:"repo_id,omitempty"` - RepoName string `json:"repo_name,omitempty"` - - OrgID string `json:"org_id,omitempty"` - OrgName string `json:"org_name,omitempty"` - - EnterpriseID string `json:"enterprise_id,omitempty"` - EnterpriseName string `json:"enterprise_name,omitempty"` - - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` - - RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` + ID string `json:"id,omitempty"` + ProviderName string `json:"provider_name,omitempty"` + MaxRunners uint `json:"max_runners,omitempty"` + MinIdleRunners uint `json:"min_idle_runners,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + Tags []Tag `json:"tags,omitempty"` + Enabled bool `json:"enabled,omitempty"` + Instances []Instance `json:"instances,omitempty"` + RepoID string `json:"repo_id,omitempty"` + RepoName string `json:"repo_name,omitempty"` + OrgID string `json:"org_id,omitempty"` + OrgName string `json:"org_name,omitempty"` + 
EnterpriseID string `json:"enterprise_id,omitempty"` + EnterpriseName string `json:"enterprise_name,omitempty"` + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` // ExtraSpecs is an opaque raw json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. The contents of this field means @@ -402,56 +327,25 @@ type Pool struct { Priority uint `json:"priority,omitempty"` } -func (p Pool) BelongsTo(entity ForgeEntity) bool { +func (p Pool) GithubEntity() (GithubEntity, error) { switch p.PoolType() { - case ForgeEntityTypeRepository: - return p.RepoID == entity.ID - case ForgeEntityTypeOrganization: - return p.OrgID == entity.ID - case ForgeEntityTypeEnterprise: - return p.EnterpriseID == entity.ID - } - return false -} - -func (p Pool) GetCreatedAt() time.Time { - return p.CreatedAt -} - -func (p Pool) MinIdleRunnersAsInt() int { - if p.MinIdleRunners > math.MaxInt { - return math.MaxInt - } - - return int(p.MinIdleRunners) -} - -func (p Pool) MaxRunnersAsInt() int { - if p.MaxRunners > math.MaxInt { - return math.MaxInt - } - return int(p.MaxRunners) -} - -func (p Pool) GetEntity() (ForgeEntity, error) { - switch p.PoolType() { - case ForgeEntityTypeRepository: - return ForgeEntity{ + case GithubEntityTypeRepository: + return GithubEntity{ ID: p.RepoID, - EntityType: ForgeEntityTypeRepository, + EntityType: GithubEntityTypeRepository, }, nil - case ForgeEntityTypeOrganization: - return ForgeEntity{ + case GithubEntityTypeOrganization: + return GithubEntity{ ID: p.OrgID, - EntityType: ForgeEntityTypeOrganization, + EntityType: GithubEntityTypeOrganization, }, nil - case ForgeEntityTypeEnterprise: - return ForgeEntity{ + case GithubEntityTypeEnterprise: + return GithubEntity{ ID: p.EnterpriseID, - EntityType: ForgeEntityTypeEnterprise, + EntityType: GithubEntityTypeEnterprise, }, nil } - return ForgeEntity{}, fmt.Errorf("pool has no associated entity") + return GithubEntity{}, fmt.Errorf("pool has no associated entity") } func (p Pool) GetID() string { @@ -465,14 +359,14 @@ func (p *Pool) RunnerTimeout() uint { return p.RunnerBootstrapTimeout } -func (p *Pool) PoolType() ForgeEntityType { +func (p *Pool) PoolType() GithubEntityType { switch { case p.RepoID != "": - return ForgeEntityTypeRepository + return GithubEntityTypeRepository case p.OrgID != "": - return ForgeEntityTypeOrganization + return GithubEntityTypeOrganization case p.EnterpriseID != "": - return ForgeEntityTypeEnterprise + return GithubEntityTypeEnterprise } return "" } @@ -492,123 +386,8 @@ func (p *Pool) HasRequiredLabels(set []string) bool { } // used by swagger client generated code -// swagger:model Pools type Pools []Pool -// swagger:model ScaleSet -type ScaleSet struct { - RunnerPrefix - - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` - - ID uint `json:"id,omitempty"` - ScaleSetID int `json:"scale_set_id,omitempty"` - Name string `json:"name,omitempty"` - DisableUpdate bool `json:"disable_update"` - - State ScaleSetState `json:"state"` - ExtendedState string `json:"extended_state,omitempty"` - - ProviderName string `json:"provider_name,omitempty"` - MaxRunners uint `json:"max_runners,omitempty"` - MinIdleRunners uint `json:"min_idle_runners,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType 
`json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - Enabled bool `json:"enabled,omitempty"` - Instances []Instance `json:"instances,omitempty"` - DesiredRunnerCount int `json:"desired_runner_count,omitempty"` - - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` - - RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` - // ExtraSpecs is an opaque raw json that gets sent to the provider - // as part of the bootstrap params for instances. It can contain - // any kind of data needed by providers. The contents of this field means - // nothing to garm itself. We don't act on the information in this field at - // all. We only validate that it's a proper json. - ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` - // GithubRunnerGroup is the github runner group in which the runners will be added. - // The runner group must be created by someone with access to the enterprise. - GitHubRunnerGroup string `json:"github-runner-group,omitempty"` - - StatusMessages []StatusMessage `json:"status_messages"` - - RepoID string `json:"repo_id,omitempty"` - RepoName string `json:"repo_name,omitempty"` - - OrgID string `json:"org_id,omitempty"` - OrgName string `json:"org_name,omitempty"` - - EnterpriseID string `json:"enterprise_id,omitempty"` - EnterpriseName string `json:"enterprise_name,omitempty"` - - LastMessageID int64 `json:"-"` -} - -func (p ScaleSet) BelongsTo(entity ForgeEntity) bool { - switch p.ScaleSetType() { - case ForgeEntityTypeRepository: - return p.RepoID == entity.ID - case ForgeEntityTypeOrganization: - return p.OrgID == entity.ID - case ForgeEntityTypeEnterprise: - return p.EnterpriseID == entity.ID - } - return false -} - -func (p ScaleSet) GetID() uint { - return p.ID -} - -func (p ScaleSet) GetEntity() (ForgeEntity, error) { - switch p.ScaleSetType() { - case ForgeEntityTypeRepository: - return ForgeEntity{ - ID: p.RepoID, - EntityType: ForgeEntityTypeRepository, - }, nil - case ForgeEntityTypeOrganization: - return ForgeEntity{ - ID: p.OrgID, - EntityType: ForgeEntityTypeOrganization, - }, nil - case ForgeEntityTypeEnterprise: - return ForgeEntity{ - ID: p.EnterpriseID, - EntityType: ForgeEntityTypeEnterprise, - }, nil - } - return ForgeEntity{}, fmt.Errorf("scale set has no associated entity") -} - -func (p *ScaleSet) ScaleSetType() ForgeEntityType { - switch { - case p.RepoID != "": - return ForgeEntityTypeRepository - case p.OrgID != "": - return ForgeEntityTypeOrganization - case p.EnterpriseID != "": - return ForgeEntityTypeEnterprise - } - return "" -} - -func (p *ScaleSet) RunnerTimeout() uint { - if p.RunnerBootstrapTimeout == 0 { - return appdefaults.DefaultRunnerBootstrapTimeout - } - return p.RunnerBootstrapTimeout -} - -// used by swagger client generated code -// swagger:model ScaleSets -type ScaleSets []ScaleSet - -// swagger:model Repository type Repository struct { ID string `json:"id,omitempty"` Owner string `json:"owner,omitempty"` @@ -617,46 +396,30 @@ type Repository struct { // CredentialName is the name of the credentials associated with the enterprise. // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. 
- CredentialsName string `json:"credentials_name,omitempty"` - - CredentialsID uint `json:"credentials_id,omitempty"` - Credentials ForgeCredentials `json:"credentials,omitempty"` - + CredentialsName string `json:"credentials_name,omitempty"` + CredentialsID uint `json:"credentials_id,omitempty"` + Credentials GithubCredentials `json:"credentials,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` + Endpoint GithubEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` - Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. WebhookSecret string `json:"-"` } -func (r Repository) GetCredentialsName() string { - if r.CredentialsName != "" { - return r.CredentialsName - } - return r.Credentials.Name -} - -func (r Repository) CreationDateGetter() time.Time { - return r.CreatedAt -} - -func (r Repository) GetEntity() (ForgeEntity, error) { +func (r Repository) GetEntity() (GithubEntity, error) { if r.ID == "" { - return ForgeEntity{}, fmt.Errorf("repository has no ID") + return GithubEntity{}, fmt.Errorf("repository has no ID") } - return ForgeEntity{ + return GithubEntity{ ID: r.ID, - EntityType: ForgeEntityTypeRepository, + EntityType: GithubEntityTypeRepository, Owner: r.Owner, Name: r.Name, PoolBalancerType: r.PoolBalancerType, Credentials: r.Credentials, WebhookSecret: r.WebhookSecret, - CreatedAt: r.CreatedAt, - UpdatedAt: r.UpdatedAt, }, nil } @@ -680,10 +443,8 @@ func (r Repository) String() string { } // used by swagger client generated code -// swagger:model Repositories type Repositories []Repository -// swagger:model Organization type Organization struct { ID string `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -692,35 +453,28 @@ type Organization struct { // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. CredentialsName string `json:"credentials_name,omitempty"` - Credentials ForgeCredentials `json:"credentials,omitempty"` + Credentials GithubCredentials `json:"credentials,omitempty"` CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` + Endpoint GithubEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` - Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. 
WebhookSecret string `json:"-"` } -func (o Organization) GetCreatedAt() time.Time { - return o.CreatedAt -} - -func (o Organization) GetEntity() (ForgeEntity, error) { +func (o Organization) GetEntity() (GithubEntity, error) { if o.ID == "" { - return ForgeEntity{}, fmt.Errorf("organization has no ID") + return GithubEntity{}, fmt.Errorf("organization has no ID") } - return ForgeEntity{ + return GithubEntity{ ID: o.ID, - EntityType: ForgeEntityTypeOrganization, + EntityType: GithubEntityTypeOrganization, Owner: o.Name, WebhookSecret: o.WebhookSecret, PoolBalancerType: o.PoolBalancerType, Credentials: o.Credentials, - CreatedAt: o.CreatedAt, - UpdatedAt: o.UpdatedAt, }, nil } @@ -740,10 +494,8 @@ func (o Organization) GetBalancerType() PoolBalancerType { } // used by swagger client generated code -// swagger:model Organizations type Organizations []Organization -// swagger:model Enterprise type Enterprise struct { ID string `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -752,35 +504,28 @@ type Enterprise struct { // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. CredentialsName string `json:"credentials_name,omitempty"` - Credentials ForgeCredentials `json:"credentials,omitempty"` + Credentials GithubCredentials `json:"credentials,omitempty"` CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` + Endpoint GithubEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` - Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. WebhookSecret string `json:"-"` } -func (e Enterprise) GetCreatedAt() time.Time { - return e.CreatedAt -} - -func (e Enterprise) GetEntity() (ForgeEntity, error) { +func (e Enterprise) GetEntity() (GithubEntity, error) { if e.ID == "" { - return ForgeEntity{}, fmt.Errorf("enterprise has no ID") + return GithubEntity{}, fmt.Errorf("enterprise has no ID") } - return ForgeEntity{ + return GithubEntity{ ID: e.ID, - EntityType: ForgeEntityTypeEnterprise, + EntityType: GithubEntityTypeEnterprise, Owner: e.Name, WebhookSecret: e.WebhookSecret, PoolBalancerType: e.PoolBalancerType, Credentials: e.Credentials, - CreatedAt: e.CreatedAt, - UpdatedAt: e.UpdatedAt, }, nil } @@ -800,11 +545,9 @@ func (e Enterprise) GetBalancerType() PoolBalancerType { } // used by swagger client generated code -// swagger:model Enterprises type Enterprises []Enterprise // Users holds information about a particular user -// swagger:model User type User struct { ID string `json:"id,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` @@ -821,12 +564,10 @@ type User struct { // JWTResponse holds the JWT token returned as a result of a // successful auth -// swagger:model JWTResponse type JWTResponse struct { Token string `json:"token,omitempty"` } -// swagger:model ControllerInfo type ControllerInfo struct { // ControllerID is the unique ID of this controller. This ID gets generated // automatically on controller init. 
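Note on the hunks above: even after the ForgeEntity to GithubEntity rename, repositories, organizations and enterprises all keep the same GetEntity() accessor, which is what made the deleted params.EntityGetter interface useful. A minimal, self-contained sketch of that pattern, using simplified stand-ins for the params types (not part of the patch):

package main

import "fmt"

// Simplified stand-ins for the params types shown in the diff.
type GithubEntityType string

type GithubEntity struct {
	ID         string
	EntityType GithubEntityType
}

// EntityGetter mirrors the interface deleted from params/interfaces.go.
type EntityGetter interface {
	GetEntity() (GithubEntity, error)
}

type Repository struct {
	ID string
}

func (r Repository) GetEntity() (GithubEntity, error) {
	if r.ID == "" {
		return GithubEntity{}, fmt.Errorf("repository has no ID")
	}
	return GithubEntity{ID: r.ID, EntityType: "repository"}, nil
}

// describeEntity accepts repositories, organizations and enterprises
// alike, without any type switches.
func describeEntity(g EntityGetter) {
	entity, err := g.GetEntity()
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%s %s\n", entity.EntityType, entity.ID)
}

func main() {
	describeEntity(Repository{ID: "8b1f3c2e"})
}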
@@ -871,63 +612,28 @@
 Version string `json:"version,omitempty"`
}
-func (c *ControllerInfo) JobBackoff() time.Duration {
- if c.MinimumJobAgeBackoff > math.MaxInt64 {
- return time.Duration(math.MaxInt64)
- }
+type GithubCredentials struct {
+ ID uint `json:"id,omitempty"`
+ Name string `json:"name,omitempty"`
+ Description string `json:"description,omitempty"`
+ APIBaseURL string `json:"api_base_url,omitempty"`
+ UploadBaseURL string `json:"upload_base_url,omitempty"`
+ BaseURL string `json:"base_url,omitempty"`
+ CABundle []byte `json:"ca_bundle,omitempty"`
+ AuthType GithubAuthType `json:"auth-type,omitempty"`
- return time.Duration(int64(c.MinimumJobAgeBackoff))
-}
-
-// swagger:model GithubRateLimit
-type GithubRateLimit struct {
- Limit int `json:"limit,omitempty"`
- Used int `json:"used,omitempty"`
- Remaining int `json:"remaining,omitempty"`
- Reset int64 `json:"reset,omitempty"`
-}
-
-func (g GithubRateLimit) ResetIn() time.Duration {
- return time.Until(g.ResetAt())
-}
-
-func (g GithubRateLimit) ResetAt() time.Time {
- if g.Reset == 0 {
- return time.Time{}
- }
- return time.Unix(g.Reset, 0)
-}
-
-// swagger:model ForgeCredentials
-type ForgeCredentials struct {
- ID uint `json:"id,omitempty"`
- Name string `json:"name,omitempty"`
- Description string `json:"description,omitempty"`
- APIBaseURL string `json:"api_base_url,omitempty"`
- UploadBaseURL string `json:"upload_base_url,omitempty"`
- BaseURL string `json:"base_url,omitempty"`
- CABundle []byte `json:"ca_bundle,omitempty"`
- AuthType ForgeAuthType `json:"auth-type,omitempty"`
-
- ForgeType EndpointType `json:"forge_type,omitempty"`
-
- Repositories []Repository `json:"repositories,omitempty"`
- Organizations []Organization `json:"organizations,omitempty"`
- Enterprises []Enterprise `json:"enterprises,omitempty"`
- Endpoint ForgeEndpoint `json:"endpoint,omitempty"`
- CreatedAt time.Time `json:"created_at,omitempty"`
- UpdatedAt time.Time `json:"updated_at,omitempty"`
- RateLimit *GithubRateLimit `json:"rate_limit,omitempty"`
+ Repositories []Repository `json:"repositories,omitempty"`
+ Organizations []Organization `json:"organizations,omitempty"`
+ Enterprises []Enterprise `json:"enterprises,omitempty"`
+ Endpoint GithubEndpoint `json:"endpoint,omitempty"`
+ CreatedAt time.Time `json:"created_at,omitempty"`
+ UpdatedAt time.Time `json:"updated_at,omitempty"`
 // Do not serialize sensitive info.
CredentialsPayload []byte `json:"-"` } -func (g ForgeCredentials) GetID() uint { - return g.ID -} - -func (g ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { +func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { var roots *x509.CertPool if g.CABundle != nil { roots = x509.NewCertPool() @@ -957,7 +663,7 @@ func (g ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, erro var tc *http.Client switch g.AuthType { - case ForgeAuthTypeApp: + case GithubAuthTypeApp: var app GithubApp if err := json.Unmarshal(g.CredentialsPayload, &app); err != nil { return nil, fmt.Errorf("failed to unmarshal github app credentials: %w", err) @@ -993,7 +699,7 @@ func (g ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, erro return tc, nil } -func (g ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { +func (g GithubCredentials) RootCertificateBundle() (CertificateBundle, error) { if len(g.CABundle) == 0 { return CertificateBundle{}, nil } @@ -1024,10 +730,8 @@ func (g ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { } // used by swagger client generated code -// swagger:model Credentials -type Credentials []ForgeCredentials +type Credentials []GithubCredentials -// swagger:model Provider type Provider struct { Name string `json:"name,omitempty"` ProviderType ProviderType `json:"type,omitempty"` @@ -1035,10 +739,8 @@ type Provider struct { } // used by swagger client generated code -// swagger:model Providers type Providers []Provider -// swagger:model PoolManagerStatus type PoolManagerStatus struct { IsRunning bool `json:"running,omitempty"` FailureReason string `json:"failure_reason,omitempty"` @@ -1060,14 +762,9 @@ func (p RunnerPrefix) GetRunnerPrefix() string { return p.Prefix } -// swagger:model Job type Job struct { // ID is the ID of the job. ID int64 `json:"id,omitempty"` - - WorkflowJobID int64 `json:"workflow_job_id,omitempty"` - // ScaleSetJobID is the job ID when generated for a scale set. - ScaleSetJobID string `json:"scaleset_job_id,omitempty"` // RunID is the ID of the workflow run. A run may have multiple jobs. RunID int64 `json:"run_id,omitempty"` // Action is the specific activity that triggered the event. 
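The GetHTTPClient hunks above first wire an optional CA bundle into the TLS root pool and only then layer PAT or GitHub App authentication on top. A standalone sketch of just the CA-bundle step, assuming only net/http and crypto/x509 from the standard library (the helper name is hypothetical, no garm imports):

package main

import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"net/http"
)

// httpClientWithCABundle returns an *http.Client that trusts the given
// PEM bundle as its root CAs. A nil bundle keeps the system trust store.
func httpClientWithCABundle(caBundle []byte) (*http.Client, error) {
	var roots *x509.CertPool
	if caBundle != nil {
		roots = x509.NewCertPool()
		if ok := roots.AppendCertsFromPEM(caBundle); !ok {
			return nil, fmt.Errorf("failed to parse CA bundle")
		}
	}
	transport := &http.Transport{
		// RootCAs == nil means "use the host's default roots".
		TLSClientConfig: &tls.Config{RootCAs: roots},
	}
	return &http.Client{Transport: transport}, nil
}

func main() {
	client, err := httpClientWithCABundle(nil)
	if err != nil {
		panic(err)
	}
	fmt.Printf("client ready: %T\n", client.Transport)
}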
@@ -1115,17 +812,14 @@ type Job struct { UpdatedAt time.Time `json:"updated_at,omitempty"` } -// swagger:model Jobs // used by swagger client generated code type Jobs []Job -// swagger:model InstallWebhookParams type InstallWebhookParams struct { WebhookEndpointType WebhookEndpointType `json:"webhook_endpoint_type,omitempty"` InsecureSSL bool `json:"insecure_ssl,omitempty"` } -// swagger:model HookInfo type HookInfo struct { ID int64 `json:"id,omitempty"` URL string `json:"url,omitempty"` @@ -1138,95 +832,56 @@ type CertificateBundle struct { RootCertificates map[string][]byte `json:"root_certificates,omitempty"` } -// swagger:model ForgeEntity type UpdateSystemInfoParams struct { OSName string `json:"os_name,omitempty"` OSVersion string `json:"os_version,omitempty"` AgentID *int64 `json:"agent_id,omitempty"` } -type ForgeEntity struct { - Owner string `json:"owner,omitempty"` - Name string `json:"name,omitempty"` - ID string `json:"id,omitempty"` - EntityType ForgeEntityType `json:"entity_type,omitempty"` - Credentials ForgeCredentials `json:"credentials,omitempty"` - PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` +type GithubEntity struct { + Owner string `json:"owner,omitempty"` + Name string `json:"name,omitempty"` + ID string `json:"id,omitempty"` + EntityType GithubEntityType `json:"entity_type,omitempty"` + Credentials GithubCredentials `json:"credentials,omitempty"` + PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` WebhookSecret string `json:"-"` } -func (g ForgeEntity) GetCreatedAt() time.Time { - return g.CreatedAt -} - -func (g ForgeEntity) GetForgeType() (EndpointType, error) { - if g.Credentials.ForgeType == "" { - return "", fmt.Errorf("credentials forge type is empty") - } - return g.Credentials.ForgeType, nil -} - -func (g ForgeEntity) ForgeURL() string { - switch g.EntityType { - case ForgeEntityTypeRepository: - return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) - case ForgeEntityTypeOrganization: - return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) - case ForgeEntityTypeEnterprise: - return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) - } - return "" -} - -func (g ForgeEntity) GetPoolBalancerType() PoolBalancerType { +func (g GithubEntity) GetPoolBalancerType() PoolBalancerType { if g.PoolBalancerType == "" { return PoolBalancerTypeRoundRobin } return g.PoolBalancerType } -func (g ForgeEntity) LabelScope() string { +func (g GithubEntity) LabelScope() string { switch g.EntityType { - case ForgeEntityTypeRepository: + case GithubEntityTypeRepository: return MetricsLabelRepositoryScope - case ForgeEntityTypeOrganization: + case GithubEntityTypeOrganization: return MetricsLabelOrganizationScope - case ForgeEntityTypeEnterprise: + case GithubEntityTypeEnterprise: return MetricsLabelEnterpriseScope } return "" } -func (g ForgeEntity) String() string { +func (g GithubEntity) String() string { switch g.EntityType { - case ForgeEntityTypeRepository: + case GithubEntityTypeRepository: return fmt.Sprintf("%s/%s", g.Owner, g.Name) - case ForgeEntityTypeOrganization, ForgeEntityTypeEnterprise: + case GithubEntityTypeOrganization, GithubEntityTypeEnterprise: return g.Owner } return "" } -func (g ForgeEntity) GetIDAsUUID() (uuid.UUID, error) { - if g.ID == "" { - return uuid.Nil, nil - } - id, err := uuid.Parse(g.ID) - if err != nil { - return uuid.Nil, fmt.Errorf("failed to parse 
entity ID: %w", err) - } - return id, nil -} - // used by swagger client generated code -// swagger:model ForgeEndpoints -type ForgeEndpoints []ForgeEndpoint +type GithubEndpoints []GithubEndpoint -// swagger:model ForgeEndpoint -type ForgeEndpoint struct { +type GithubEndpoint struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` APIBaseURL string `json:"api_base_url,omitempty"` @@ -1236,21 +891,5 @@ type ForgeEndpoint struct { CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` - EndpointType EndpointType `json:"endpoint_type,omitempty"` -} - -type RepositoryFilter struct { - Owner string - Name string - Endpoint string -} - -type OrganizationFilter struct { - Name string - Endpoint string -} - -type EnterpriseFilter struct { - Name string - Endpoint string + Credentials []GithubCredentials `json:"credentials,omitempty"` } diff --git a/params/requests.go b/params/requests.go index c9021434..c7c46821 100644 --- a/params/requests.go +++ b/params/requests.go @@ -21,6 +21,8 @@ import ( "fmt" "net/url" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" ) @@ -37,14 +39,12 @@ type InstanceRequest struct { OSVersion string `json:"os_version"` } -// swagger:model CreateRepoParams type CreateRepoParams struct { Owner string `json:"owner,omitempty"` Name string `json:"name,omitempty"` CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancer_type,omitempty"` - ForgeType EndpointType `json:"forge_type,omitempty"` } func (c *CreateRepoParams) Validate() error { @@ -63,13 +63,6 @@ func (c *CreateRepoParams) Validate() error { return runnerErrors.NewMissingSecretError("missing secret") } - switch c.ForgeType { - case GithubEndpointType, GiteaEndpointType, AutoEndpointType: - break - default: - return runnerErrors.NewBadRequestError("invalid forge type") - } - switch c.PoolBalancerType { case PoolBalancerTypeRoundRobin, PoolBalancerTypePack, PoolBalancerTypeNone: default: @@ -79,13 +72,11 @@ func (c *CreateRepoParams) Validate() error { return nil } -// swagger:model CreateOrgParams type CreateOrgParams struct { Name string `json:"name,omitempty"` CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancer_type,omitempty"` - ForgeType EndpointType `json:"forge_type,omitempty"` } func (c *CreateOrgParams) Validate() error { @@ -100,13 +91,6 @@ func (c *CreateOrgParams) Validate() error { return runnerErrors.NewMissingSecretError("missing secret") } - switch c.ForgeType { - case GithubEndpointType, GiteaEndpointType, AutoEndpointType: - break - default: - return runnerErrors.NewBadRequestError("invalid forge type") - } - switch c.PoolBalancerType { case PoolBalancerTypeRoundRobin, PoolBalancerTypePack, PoolBalancerTypeNone: default: @@ -115,7 +99,6 @@ func (c *CreateOrgParams) Validate() error { return nil } -// swagger:model CreateEnterpriseParams type CreateEnterpriseParams struct { Name string `json:"name,omitempty"` CredentialsName string `json:"credentials_name,omitempty"` @@ -144,7 +127,6 @@ func (c *CreateEnterpriseParams) Validate() error { // NewUserParams holds the needed information to create // a new user -// swagger:model NewUserParams type NewUserParams struct { Email string 
`json:"email,omitempty"` Username string `json:"username,omitempty"` @@ -154,7 +136,6 @@ type NewUserParams struct { Enabled bool `json:"-"` } -// swagger:model UpdatePoolParams type UpdatePoolParams struct { RunnerPrefix @@ -192,7 +173,6 @@ type CreateInstanceParams struct { JitConfiguration map[string]string `json:"jit_configuration,omitempty"` } -// swagger:model CreatePoolParams type CreatePoolParams struct { RunnerPrefix @@ -267,7 +247,6 @@ type UpdateUserParams struct { Enabled *bool `json:"enabled,omitempty"` } -// swagger:model PasswordLoginParams // PasswordLoginParams holds information used during // password authentication, that will be passed to a // password login function @@ -284,7 +263,6 @@ func (p PasswordLoginParams) Validate() error { return nil } -// swagger:model UpdateEntityParams type UpdateEntityParams struct { CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` @@ -297,7 +275,6 @@ type InstanceUpdateMessage struct { AgentID *int64 `json:"agent_id,omitempty"` } -// swagger:model CreateGithubEndpointParams type CreateGithubEndpointParams struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -365,7 +342,6 @@ func (c CreateGithubEndpointParams) Validate() error { return nil } -// swagger:model UpdateGithubEndpointParams type UpdateGithubEndpointParams struct { Description *string `json:"description,omitempty"` APIBaseURL *string `json:"api_base_url,omitempty"` @@ -424,12 +400,10 @@ func (u UpdateGithubEndpointParams) Validate() error { return nil } -// swagger:model GithubPAT type GithubPAT struct { OAuth2Token string `json:"oauth2_token,omitempty"` } -// swagger:model GithubApp type GithubApp struct { AppID int64 `json:"app_id,omitempty"` InstallationID int64 `json:"installation_id,omitempty"` @@ -462,14 +436,13 @@ func (g GithubApp) Validate() error { return nil } -// swagger:model CreateGithubCredentialsParams type CreateGithubCredentialsParams struct { - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Endpoint string `json:"endpoint,omitempty"` - AuthType ForgeAuthType `json:"auth_type,omitempty"` - PAT GithubPAT `json:"pat,omitempty"` - App GithubApp `json:"app,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + AuthType GithubAuthType `json:"auth_type,omitempty"` + PAT GithubPAT `json:"pat,omitempty"` + App GithubApp `json:"app,omitempty"` } func (c CreateGithubCredentialsParams) Validate() error { @@ -482,27 +455,26 @@ func (c CreateGithubCredentialsParams) Validate() error { } switch c.AuthType { - case ForgeAuthTypePAT, ForgeAuthTypeApp: + case GithubAuthTypePAT, GithubAuthTypeApp: default: return runnerErrors.NewBadRequestError("invalid auth_type") } - if c.AuthType == ForgeAuthTypePAT { + if c.AuthType == GithubAuthTypePAT { if c.PAT.OAuth2Token == "" { return runnerErrors.NewBadRequestError("missing oauth2_token") } } - if c.AuthType == ForgeAuthTypeApp { + if c.AuthType == GithubAuthTypeApp { if err := c.App.Validate(); err != nil { - return fmt.Errorf("invalid app: %w", err) + return errors.Wrap(err, "invalid app") } } return nil } -// swagger:model UpdateGithubCredentialsParams type UpdateGithubCredentialsParams struct { Name *string `json:"name,omitempty"` Description *string `json:"description,omitempty"` @@ -523,14 +495,13 @@ func (u UpdateGithubCredentialsParams) Validate() error { if u.App != nil { if err := 
u.App.Validate(); err != nil { - return fmt.Errorf("invalid app: %w", err) + return errors.Wrap(err, "invalid app") } } return nil } -// swagger:model UpdateControllerParams type UpdateControllerParams struct { MetadataURL *string `json:"metadata_url,omitempty"` CallbackURL *string `json:"callback_url,omitempty"` @@ -562,228 +533,3 @@ func (u UpdateControllerParams) Validate() error { return nil } - -// swagger:model CreateScaleSetParams -type CreateScaleSetParams struct { - RunnerPrefix - - Name string `json:"name"` - DisableUpdate bool `json:"disable_update"` - ScaleSetID int `json:"scale_set_id"` - - ProviderName string `json:"provider_name,omitempty"` - MaxRunners uint `json:"max_runners,omitempty"` - MinIdleRunners uint `json:"min_idle_runners,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType `json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - Tags []string `json:"tags,omitempty"` - Enabled bool `json:"enabled,omitempty"` - RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` - ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` - // GithubRunnerGroup is the github runner group in which the runners of this - // pool will be added to. - // The runner group must be created by someone with access to the enterprise. - GitHubRunnerGroup string `json:"github-runner-group,omitempty"` -} - -func (s *CreateScaleSetParams) Validate() error { - if s.ProviderName == "" { - return fmt.Errorf("missing provider") - } - - if s.MinIdleRunners > s.MaxRunners { - return fmt.Errorf("min_idle_runners cannot be larger than max_runners") - } - - if s.MaxRunners == 0 { - return fmt.Errorf("max_runners cannot be 0") - } - - if s.Flavor == "" { - return fmt.Errorf("missing flavor") - } - - if s.Image == "" { - return fmt.Errorf("missing image") - } - - if s.Name == "" { - return fmt.Errorf("missing scale set name") - } - - return nil -} - -// swagger:model UpdateScaleSetParams -type UpdateScaleSetParams struct { - RunnerPrefix - - Name string `json:"name,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - MaxRunners *uint `json:"max_runners,omitempty"` - MinIdleRunners *uint `json:"min_idle_runners,omitempty"` - RunnerBootstrapTimeout *uint `json:"runner_bootstrap_timeout,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType `json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` - // GithubRunnerGroup is the github runner group in which the runners of this - // pool will be added to. - // The runner group must be created by someone with access to the enterprise. 
- GitHubRunnerGroup *string `json:"runner_group,omitempty"` - State *ScaleSetState `json:"state"` - ExtendedState *string `json:"extended_state"` - ScaleSetID int `json:"-"` -} - -// swagger:model CreateGiteaEndpointParams -type CreateGiteaEndpointParams struct { - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - APIBaseURL string `json:"api_base_url,omitempty"` - BaseURL string `json:"base_url,omitempty"` - CACertBundle []byte `json:"ca_cert_bundle,omitempty"` -} - -func (c CreateGiteaEndpointParams) Validate() error { - if c.APIBaseURL == "" { - return runnerErrors.NewBadRequestError("missing api_base_url") - } - - url, err := url.Parse(c.APIBaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - - if c.BaseURL == "" { - return runnerErrors.NewBadRequestError("missing base_url") - } - - url, err = url.Parse(c.BaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid base_url") - } - - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - - if c.CACertBundle != nil { - block, _ := pem.Decode(c.CACertBundle) - if block == nil { - return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") - } - if _, err := x509.ParseCertificates(block.Bytes); err != nil { - return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") - } - } - - return nil -} - -// swagger:model UpdateGiteaEndpointParams -type UpdateGiteaEndpointParams struct { - Description *string `json:"description,omitempty"` - APIBaseURL *string `json:"api_base_url,omitempty"` - BaseURL *string `json:"base_url,omitempty"` - CACertBundle []byte `json:"ca_cert_bundle,omitempty"` -} - -func (u UpdateGiteaEndpointParams) Validate() error { - if u.APIBaseURL != nil { - url, err := url.Parse(*u.APIBaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - } - - if u.BaseURL != nil { - url, err := url.Parse(*u.BaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid base_url") - } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - } - - if u.CACertBundle != nil { - block, _ := pem.Decode(u.CACertBundle) - if block == nil { - return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") - } - if _, err := x509.ParseCertificates(block.Bytes); err != nil { - return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") - } - } - - return nil -} - -// swagger:model CreateGiteaCredentialsParams -type CreateGiteaCredentialsParams struct { - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Endpoint string `json:"endpoint,omitempty"` - AuthType ForgeAuthType `json:"auth_type,omitempty"` - PAT GithubPAT `json:"pat,omitempty"` - App GithubApp `json:"app,omitempty"` -} - -func (c CreateGiteaCredentialsParams) Validate() error { - if c.Name == "" { - return runnerErrors.NewBadRequestError("missing name") - } - - if c.Endpoint == "" { - return 
runnerErrors.NewBadRequestError("missing endpoint") - } - - switch c.AuthType { - case ForgeAuthTypePAT: - default: - return runnerErrors.NewBadRequestError("invalid auth_type: %s", c.AuthType) - } - - if c.AuthType == ForgeAuthTypePAT { - if c.PAT.OAuth2Token == "" { - return runnerErrors.NewBadRequestError("missing oauth2_token") - } - } - - return nil -} - -// swagger:model UpdateGiteaCredentialsParams -type UpdateGiteaCredentialsParams struct { - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - PAT *GithubPAT `json:"pat,omitempty"` -} - -func (u UpdateGiteaCredentialsParams) Validate() error { - if u.PAT != nil { - if u.PAT.OAuth2Token == "" { - return runnerErrors.NewBadRequestError("missing oauth2_token") - } - } - - return nil -} diff --git a/runner/common.go b/runner/common.go deleted file mode 100644 index b1682c0c..00000000 --- a/runner/common.go +++ /dev/null @@ -1,31 +0,0 @@ -package runner - -import ( - "context" - "errors" - "fmt" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -func (r *Runner) ResolveForgeCredentialByName(ctx context.Context, credentialsName string) (params.ForgeCredentials, error) { - githubCred, err := r.store.GetGithubCredentialsByName(ctx, credentialsName, false) - if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) - } - giteaCred, err := r.store.GetGiteaCredentialsByName(ctx, credentialsName, false) - if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) - } - if githubCred.ID != 0 && giteaCred.ID != 0 { - return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("credentials %s are defined for both GitHub and Gitea, please specify the forge type", credentialsName) - } - if githubCred.ID != 0 { - return githubCred, nil - } - if giteaCred.ID != 0 { - return giteaCred, nil - } - return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("credentials %s not found", credentialsName) -} diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index c1dbeae9..c867d32e 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
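The remainder of this file's diff removes the mockery "expecter" helpers (the EXPECT() accessor and the *_Call wrapper types), so tests fall back to stringly-typed expectations. An illustration with a toy hand-rolled mock (a stand-in, not the generated GithubClient):

package mocks_test

import (
	"testing"

	"github.com/stretchr/testify/mock"
)

// pinger is a hand-rolled stand-in for a mockery-generated mock.
type pinger struct {
	mock.Mock
}

func (p *pinger) Ping(id int64) error {
	args := p.Called(id)
	return args.Error(0)
}

func TestPingExpectation(t *testing.T) {
	p := &pinger{}
	// Without the expecter helpers, the method name is a string and the
	// argument types are only checked when the test runs.
	p.On("Ping", int64(42)).Return(nil)

	if err := p.Ping(42); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	p.AssertExpectations(t)
}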
package mocks @@ -9,8 +9,6 @@ import ( mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" - - url "net/url" ) // GithubClient is an autogenerated mock type for the GithubClient type @@ -18,14 +16,6 @@ type GithubClient struct { mock.Mock } -type GithubClient_Expecter struct { - mock *mock.Mock -} - -func (_m *GithubClient) EXPECT() *GithubClient_Expecter { - return &GithubClient_Expecter{mock: &_m.Mock} -} - // CreateEntityHook provides a mock function with given fields: ctx, hook func (_m *GithubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (*github.Hook, error) { ret := _m.Called(ctx, hook) @@ -56,35 +46,6 @@ func (_m *GithubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) return r0, r1 } -// GithubClient_CreateEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityHook' -type GithubClient_CreateEntityHook_Call struct { - *mock.Call -} - -// CreateEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - hook *github.Hook -func (_e *GithubClient_Expecter) CreateEntityHook(ctx interface{}, hook interface{}) *GithubClient_CreateEntityHook_Call { - return &GithubClient_CreateEntityHook_Call{Call: _e.mock.On("CreateEntityHook", ctx, hook)} -} - -func (_c *GithubClient_CreateEntityHook_Call) Run(run func(ctx context.Context, hook *github.Hook)) *GithubClient_CreateEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.Hook)) - }) - return _c -} - -func (_c *GithubClient_CreateEntityHook_Call) Return(ret *github.Hook, err error) *GithubClient_CreateEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubClient_CreateEntityHook_Call) RunAndReturn(run func(context.Context, *github.Hook) (*github.Hook, error)) *GithubClient_CreateEntityHook_Call { - _c.Call.Return(run) - return _c -} - // CreateEntityRegistrationToken provides a mock function with given fields: ctx func (_m *GithubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { ret := _m.Called(ctx) @@ -124,34 +85,6 @@ func (_m *GithubClient) CreateEntityRegistrationToken(ctx context.Context) (*git return r0, r1, r2 } -// GithubClient_CreateEntityRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityRegistrationToken' -type GithubClient_CreateEntityRegistrationToken_Call struct { - *mock.Call -} - -// CreateEntityRegistrationToken is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubClient_Expecter) CreateEntityRegistrationToken(ctx interface{}) *GithubClient_CreateEntityRegistrationToken_Call { - return &GithubClient_CreateEntityRegistrationToken_Call{Call: _e.mock.On("CreateEntityRegistrationToken", ctx)} -} - -func (_c *GithubClient_CreateEntityRegistrationToken_Call) Run(run func(ctx context.Context)) *GithubClient_CreateEntityRegistrationToken_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubClient_CreateEntityRegistrationToken_Call) Return(_a0 *github.RegistrationToken, _a1 *github.Response, _a2 error) *GithubClient_CreateEntityRegistrationToken_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubClient_CreateEntityRegistrationToken_Call) RunAndReturn(run func(context.Context) (*github.RegistrationToken, *github.Response, error)) 
*GithubClient_CreateEntityRegistrationToken_Call { - _c.Call.Return(run) - return _c -} - // DeleteEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -182,80 +115,6 @@ func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github return r0, r1 } -// GithubClient_DeleteEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityHook' -type GithubClient_DeleteEntityHook_Call struct { - *mock.Call -} - -// DeleteEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubClient_Expecter) DeleteEntityHook(ctx interface{}, id interface{}) *GithubClient_DeleteEntityHook_Call { - return &GithubClient_DeleteEntityHook_Call{Call: _e.mock.On("DeleteEntityHook", ctx, id)} -} - -func (_c *GithubClient_DeleteEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_DeleteEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubClient_DeleteEntityHook_Call) Return(ret *github.Response, err error) *GithubClient_DeleteEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubClient_DeleteEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubClient_DeleteEntityHook_Call { - _c.Call.Return(run) - return _c -} - -// GetEntity provides a mock function with no fields -func (_m *GithubClient) GetEntity() params.ForgeEntity { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetEntity") - } - - var r0 params.ForgeEntity - if rf, ok := ret.Get(0).(func() params.ForgeEntity); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(params.ForgeEntity) - } - - return r0 -} - -// GithubClient_GetEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntity' -type GithubClient_GetEntity_Call struct { - *mock.Call -} - -// GetEntity is a helper method to define mock.On call -func (_e *GithubClient_Expecter) GetEntity() *GithubClient_GetEntity_Call { - return &GithubClient_GetEntity_Call{Call: _e.mock.On("GetEntity")} -} - -func (_c *GithubClient_GetEntity_Call) Run(run func()) *GithubClient_GetEntity_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GithubClient_GetEntity_Call) Return(_a0 params.ForgeEntity) *GithubClient_GetEntity_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubClient_GetEntity_Call) RunAndReturn(run func() params.ForgeEntity) *GithubClient_GetEntity_Call { - _c.Call.Return(run) - return _c -} - // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -286,35 +145,6 @@ func (_m *GithubClient) GetEntityHook(ctx context.Context, id int64) (*github.Ho return r0, r1 } -// GithubClient_GetEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityHook' -type GithubClient_GetEntityHook_Call struct { - *mock.Call -} - -// GetEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubClient_Expecter) GetEntityHook(ctx interface{}, id interface{}) *GithubClient_GetEntityHook_Call { - return 
&GithubClient_GetEntityHook_Call{Call: _e.mock.On("GetEntityHook", ctx, id)} -} - -func (_c *GithubClient_GetEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_GetEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubClient_GetEntityHook_Call) Return(ret *github.Hook, err error) *GithubClient_GetEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubClient_GetEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Hook, error)) *GithubClient_GetEntityHook_Call { - _c.Call.Return(run) - return _c -} - // GetEntityJITConfig provides a mock function with given fields: ctx, instance, pool, labels func (_m *GithubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (map[string]string, *github.Runner, error) { ret := _m.Called(ctx, instance, pool, labels) @@ -354,94 +184,6 @@ func (_m *GithubClient) GetEntityJITConfig(ctx context.Context, instance string, return r0, r1, r2 } -// GithubClient_GetEntityJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityJITConfig' -type GithubClient_GetEntityJITConfig_Call struct { - *mock.Call -} - -// GetEntityJITConfig is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - pool params.Pool -// - labels []string -func (_e *GithubClient_Expecter) GetEntityJITConfig(ctx interface{}, instance interface{}, pool interface{}, labels interface{}) *GithubClient_GetEntityJITConfig_Call { - return &GithubClient_GetEntityJITConfig_Call{Call: _e.mock.On("GetEntityJITConfig", ctx, instance, pool, labels)} -} - -func (_c *GithubClient_GetEntityJITConfig_Call) Run(run func(ctx context.Context, instance string, pool params.Pool, labels []string)) *GithubClient_GetEntityJITConfig_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.Pool), args[3].([]string)) - }) - return _c -} - -func (_c *GithubClient_GetEntityJITConfig_Call) Return(jitConfigMap map[string]string, runner *github.Runner, err error) *GithubClient_GetEntityJITConfig_Call { - _c.Call.Return(jitConfigMap, runner, err) - return _c -} - -func (_c *GithubClient_GetEntityJITConfig_Call) RunAndReturn(run func(context.Context, string, params.Pool, []string) (map[string]string, *github.Runner, error)) *GithubClient_GetEntityJITConfig_Call { - _c.Call.Return(run) - return _c -} - -// GetEntityRunnerGroupIDByName provides a mock function with given fields: ctx, runnerGroupName -func (_m *GithubClient) GetEntityRunnerGroupIDByName(ctx context.Context, runnerGroupName string) (int64, error) { - ret := _m.Called(ctx, runnerGroupName) - - if len(ret) == 0 { - panic("no return value specified for GetEntityRunnerGroupIDByName") - } - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (int64, error)); ok { - return rf(ctx, runnerGroupName) - } - if rf, ok := ret.Get(0).(func(context.Context, string) int64); ok { - r0 = rf(ctx, runnerGroupName) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, runnerGroupName) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GithubClient_GetEntityRunnerGroupIDByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityRunnerGroupIDByName' -type 
GithubClient_GetEntityRunnerGroupIDByName_Call struct { - *mock.Call -} - -// GetEntityRunnerGroupIDByName is a helper method to define mock.On call -// - ctx context.Context -// - runnerGroupName string -func (_e *GithubClient_Expecter) GetEntityRunnerGroupIDByName(ctx interface{}, runnerGroupName interface{}) *GithubClient_GetEntityRunnerGroupIDByName_Call { - return &GithubClient_GetEntityRunnerGroupIDByName_Call{Call: _e.mock.On("GetEntityRunnerGroupIDByName", ctx, runnerGroupName)} -} - -func (_c *GithubClient_GetEntityRunnerGroupIDByName_Call) Run(run func(ctx context.Context, runnerGroupName string)) *GithubClient_GetEntityRunnerGroupIDByName_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *GithubClient_GetEntityRunnerGroupIDByName_Call) Return(_a0 int64, _a1 error) *GithubClient_GetEntityRunnerGroupIDByName_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GithubClient_GetEntityRunnerGroupIDByName_Call) RunAndReturn(run func(context.Context, string) (int64, error)) *GithubClient_GetEntityRunnerGroupIDByName_Call { - _c.Call.Return(run) - return _c -} - // GetWorkflowJobByID provides a mock function with given fields: ctx, owner, repo, jobID func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, repo string, jobID int64) (*github.WorkflowJob, *github.Response, error) { ret := _m.Called(ctx, owner, repo, jobID) @@ -481,84 +223,6 @@ func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, re return r0, r1, r2 } -// GithubClient_GetWorkflowJobByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWorkflowJobByID' -type GithubClient_GetWorkflowJobByID_Call struct { - *mock.Call -} - -// GetWorkflowJobByID is a helper method to define mock.On call -// - ctx context.Context -// - owner string -// - repo string -// - jobID int64 -func (_e *GithubClient_Expecter) GetWorkflowJobByID(ctx interface{}, owner interface{}, repo interface{}, jobID interface{}) *GithubClient_GetWorkflowJobByID_Call { - return &GithubClient_GetWorkflowJobByID_Call{Call: _e.mock.On("GetWorkflowJobByID", ctx, owner, repo, jobID)} -} - -func (_c *GithubClient_GetWorkflowJobByID_Call) Run(run func(ctx context.Context, owner string, repo string, jobID int64)) *GithubClient_GetWorkflowJobByID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(int64)) - }) - return _c -} - -func (_c *GithubClient_GetWorkflowJobByID_Call) Return(_a0 *github.WorkflowJob, _a1 *github.Response, _a2 error) *GithubClient_GetWorkflowJobByID_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubClient_GetWorkflowJobByID_Call) RunAndReturn(run func(context.Context, string, string, int64) (*github.WorkflowJob, *github.Response, error)) *GithubClient_GetWorkflowJobByID_Call { - _c.Call.Return(run) - return _c -} - -// GithubBaseURL provides a mock function with no fields -func (_m *GithubClient) GithubBaseURL() *url.URL { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GithubBaseURL") - } - - var r0 *url.URL - if rf, ok := ret.Get(0).(func() *url.URL); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*url.URL) - } - } - - return r0 -} - -// GithubClient_GithubBaseURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubBaseURL' -type GithubClient_GithubBaseURL_Call struct { 
- *mock.Call -} - -// GithubBaseURL is a helper method to define mock.On call -func (_e *GithubClient_Expecter) GithubBaseURL() *GithubClient_GithubBaseURL_Call { - return &GithubClient_GithubBaseURL_Call{Call: _e.mock.On("GithubBaseURL")} -} - -func (_c *GithubClient_GithubBaseURL_Call) Run(run func()) *GithubClient_GithubBaseURL_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GithubClient_GithubBaseURL_Call) Return(_a0 *url.URL) *GithubClient_GithubBaseURL_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubClient_GithubBaseURL_Call) RunAndReturn(run func() *url.URL) *GithubClient_GithubBaseURL_Call { - _c.Call.Return(run) - return _c -} - // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -598,35 +262,6 @@ func (_m *GithubClient) ListEntityHooks(ctx context.Context, opts *github.ListOp return r0, r1, r2 } -// GithubClient_ListEntityHooks_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityHooks' -type GithubClient_ListEntityHooks_Call struct { - *mock.Call -} - -// ListEntityHooks is a helper method to define mock.On call -// - ctx context.Context -// - opts *github.ListOptions -func (_e *GithubClient_Expecter) ListEntityHooks(ctx interface{}, opts interface{}) *GithubClient_ListEntityHooks_Call { - return &GithubClient_ListEntityHooks_Call{Call: _e.mock.On("ListEntityHooks", ctx, opts)} -} - -func (_c *GithubClient_ListEntityHooks_Call) Run(run func(ctx context.Context, opts *github.ListOptions)) *GithubClient_ListEntityHooks_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.ListOptions)) - }) - return _c -} - -func (_c *GithubClient_ListEntityHooks_Call) Return(ret []*github.Hook, response *github.Response, err error) *GithubClient_ListEntityHooks_Call { - _c.Call.Return(ret, response, err) - return _c -} - -func (_c *GithubClient_ListEntityHooks_Call) RunAndReturn(run func(context.Context, *github.ListOptions) ([]*github.Hook, *github.Response, error)) *GithubClient_ListEntityHooks_Call { - _c.Call.Return(run) - return _c -} - // ListEntityRunnerApplicationDownloads provides a mock function with given fields: ctx func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { ret := _m.Called(ctx) @@ -666,34 +301,6 @@ func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context return r0, r1, r2 } -// GithubClient_ListEntityRunnerApplicationDownloads_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunnerApplicationDownloads' -type GithubClient_ListEntityRunnerApplicationDownloads_Call struct { - *mock.Call -} - -// ListEntityRunnerApplicationDownloads is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubClient_Expecter) ListEntityRunnerApplicationDownloads(ctx interface{}) *GithubClient_ListEntityRunnerApplicationDownloads_Call { - return &GithubClient_ListEntityRunnerApplicationDownloads_Call{Call: _e.mock.On("ListEntityRunnerApplicationDownloads", ctx)} -} - -func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) Run(run func(ctx context.Context)) *GithubClient_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) Return(_a0 []*github.RunnerApplicationDownload, _a1 *github.Response, _a2 error) *GithubClient_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) RunAndReturn(run func(context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error)) *GithubClient_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Return(run) - return _c -} - // ListEntityRunners provides a mock function with given fields: ctx, opts func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -733,35 +340,6 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List return r0, r1, r2 } -// GithubClient_ListEntityRunners_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunners' -type GithubClient_ListEntityRunners_Call struct { - *mock.Call -} - -// ListEntityRunners is a helper method to define mock.On call -// - ctx context.Context -// - opts *github.ListRunnersOptions -func (_e *GithubClient_Expecter) ListEntityRunners(ctx interface{}, opts interface{}) *GithubClient_ListEntityRunners_Call { - return &GithubClient_ListEntityRunners_Call{Call: _e.mock.On("ListEntityRunners", ctx, opts)} -} - -func (_c *GithubClient_ListEntityRunners_Call) Run(run func(ctx context.Context, opts *github.ListRunnersOptions)) *GithubClient_ListEntityRunners_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.ListRunnersOptions)) - }) - return _c -} - -func (_c *GithubClient_ListEntityRunners_Call) Return(_a0 *github.Runners, _a1 *github.Response, _a2 error) *GithubClient_ListEntityRunners_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubClient_ListEntityRunners_Call) RunAndReturn(run func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)) *GithubClient_ListEntityRunners_Call { - _c.Call.Return(run) - return _c -} - // PingEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -792,138 +370,34 @@ func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.R return r0, r1 } -// GithubClient_PingEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PingEntityHook' -type GithubClient_PingEntityHook_Call struct { - *mock.Call -} - -// PingEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubClient_Expecter) PingEntityHook(ctx interface{}, id interface{}) *GithubClient_PingEntityHook_Call { - return &GithubClient_PingEntityHook_Call{Call: _e.mock.On("PingEntityHook", ctx, id)} -} - -func (_c *GithubClient_PingEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_PingEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubClient_PingEntityHook_Call) Return(ret *github.Response, err error) *GithubClient_PingEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubClient_PingEntityHook_Call) RunAndReturn(run 
func(context.Context, int64) (*github.Response, error)) *GithubClient_PingEntityHook_Call { - _c.Call.Return(run) - return _c -} - -// RateLimit provides a mock function with given fields: ctx -func (_m *GithubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for RateLimit") - } - - var r0 *github.RateLimits - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*github.RateLimits) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GithubClient_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' -type GithubClient_RateLimit_Call struct { - *mock.Call -} - -// RateLimit is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubClient_Expecter) RateLimit(ctx interface{}) *GithubClient_RateLimit_Call { - return &GithubClient_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} -} - -func (_c *GithubClient_RateLimit_Call) Run(run func(ctx context.Context)) *GithubClient_RateLimit_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubClient_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *GithubClient_RateLimit_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GithubClient_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *GithubClient_RateLimit_Call { - _c.Call.Return(run) - return _c -} - // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID -func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { +func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { ret := _m.Called(ctx, runnerID) if len(ret) == 0 { panic("no return value specified for RemoveEntityRunner") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { + var r0 *github.Response + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (*github.Response, error)); ok { + return rf(ctx, runnerID) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) *github.Response); ok { r0 = rf(ctx, runnerID) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*github.Response) + } } - return r0 -} + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, runnerID) + } else { + r1 = ret.Error(1) + } -// GithubClient_RemoveEntityRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveEntityRunner' -type GithubClient_RemoveEntityRunner_Call struct { - *mock.Call -} - -// RemoveEntityRunner is a helper method to define mock.On call -// - ctx context.Context -// - runnerID int64 -func (_e *GithubClient_Expecter) RemoveEntityRunner(ctx interface{}, runnerID interface{}) *GithubClient_RemoveEntityRunner_Call { - return &GithubClient_RemoveEntityRunner_Call{Call: _e.mock.On("RemoveEntityRunner", ctx, runnerID)} -} - -func (_c *GithubClient_RemoveEntityRunner_Call) Run(run func(ctx context.Context, runnerID int64)) *GithubClient_RemoveEntityRunner_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubClient_RemoveEntityRunner_Call) Return(_a0 error) *GithubClient_RemoveEntityRunner_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubClient_RemoveEntityRunner_Call) RunAndReturn(run func(context.Context, int64) error) *GithubClient_RemoveEntityRunner_Call { - _c.Call.Return(run) - return _c + return r0, r1 } // NewGithubClient creates a new instance of GithubClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 0b3c3f83..0015a485 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -9,8 +9,6 @@ import ( mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" - - url "net/url" ) // GithubEntityOperations is an autogenerated mock type for the GithubEntityOperations type @@ -18,14 +16,6 @@ type GithubEntityOperations struct { mock.Mock } -type GithubEntityOperations_Expecter struct { - mock *mock.Mock -} - -func (_m *GithubEntityOperations) EXPECT() *GithubEntityOperations_Expecter { - return &GithubEntityOperations_Expecter{mock: &_m.Mock} -} - // CreateEntityHook provides a mock function with given fields: ctx, hook func (_m *GithubEntityOperations) CreateEntityHook(ctx context.Context, hook *github.Hook) (*github.Hook, error) { ret := _m.Called(ctx, hook) @@ -56,35 +46,6 @@ func (_m *GithubEntityOperations) CreateEntityHook(ctx context.Context, hook *gi return r0, r1 } -// GithubEntityOperations_CreateEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityHook' -type GithubEntityOperations_CreateEntityHook_Call struct { - *mock.Call -} - -// CreateEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - hook *github.Hook -func (_e *GithubEntityOperations_Expecter) CreateEntityHook(ctx interface{}, hook interface{}) *GithubEntityOperations_CreateEntityHook_Call { - return &GithubEntityOperations_CreateEntityHook_Call{Call: _e.mock.On("CreateEntityHook", ctx, hook)} -} - -func (_c *GithubEntityOperations_CreateEntityHook_Call) Run(run func(ctx context.Context, hook *github.Hook)) *GithubEntityOperations_CreateEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.Hook)) - }) - return _c -} - -func (_c *GithubEntityOperations_CreateEntityHook_Call) Return(ret *github.Hook, err error) *GithubEntityOperations_CreateEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubEntityOperations_CreateEntityHook_Call) RunAndReturn(run func(context.Context, *github.Hook) (*github.Hook, error)) *GithubEntityOperations_CreateEntityHook_Call { - _c.Call.Return(run) - return _c -} - // CreateEntityRegistrationToken provides a mock function with given fields: ctx func (_m *GithubEntityOperations) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { ret := _m.Called(ctx) @@ -124,34 +85,6 @@ func (_m *GithubEntityOperations) CreateEntityRegistrationToken(ctx context.Cont return r0, r1, r2 } -// GithubEntityOperations_CreateEntityRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit 
version for method 'CreateEntityRegistrationToken' -type GithubEntityOperations_CreateEntityRegistrationToken_Call struct { - *mock.Call -} - -// CreateEntityRegistrationToken is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubEntityOperations_Expecter) CreateEntityRegistrationToken(ctx interface{}) *GithubEntityOperations_CreateEntityRegistrationToken_Call { - return &GithubEntityOperations_CreateEntityRegistrationToken_Call{Call: _e.mock.On("CreateEntityRegistrationToken", ctx)} -} - -func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_CreateEntityRegistrationToken_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) Return(_a0 *github.RegistrationToken, _a1 *github.Response, _a2 error) *GithubEntityOperations_CreateEntityRegistrationToken_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) RunAndReturn(run func(context.Context) (*github.RegistrationToken, *github.Response, error)) *GithubEntityOperations_CreateEntityRegistrationToken_Call { - _c.Call.Return(run) - return _c -} - // DeleteEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -182,80 +115,6 @@ func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64 return r0, r1 } -// GithubEntityOperations_DeleteEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityHook' -type GithubEntityOperations_DeleteEntityHook_Call struct { - *mock.Call -} - -// DeleteEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubEntityOperations_Expecter) DeleteEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_DeleteEntityHook_Call { - return &GithubEntityOperations_DeleteEntityHook_Call{Call: _e.mock.On("DeleteEntityHook", ctx, id)} -} - -func (_c *GithubEntityOperations_DeleteEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_DeleteEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubEntityOperations_DeleteEntityHook_Call) Return(ret *github.Response, err error) *GithubEntityOperations_DeleteEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubEntityOperations_DeleteEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubEntityOperations_DeleteEntityHook_Call { - _c.Call.Return(run) - return _c -} - -// GetEntity provides a mock function with no fields -func (_m *GithubEntityOperations) GetEntity() params.ForgeEntity { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetEntity") - } - - var r0 params.ForgeEntity - if rf, ok := ret.Get(0).(func() params.ForgeEntity); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(params.ForgeEntity) - } - - return r0 -} - -// GithubEntityOperations_GetEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntity' -type GithubEntityOperations_GetEntity_Call struct { - *mock.Call -} - -// GetEntity is a helper method to define mock.On call -func 
(_e *GithubEntityOperations_Expecter) GetEntity() *GithubEntityOperations_GetEntity_Call { - return &GithubEntityOperations_GetEntity_Call{Call: _e.mock.On("GetEntity")} -} - -func (_c *GithubEntityOperations_GetEntity_Call) Run(run func()) *GithubEntityOperations_GetEntity_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GithubEntityOperations_GetEntity_Call) Return(_a0 params.ForgeEntity) *GithubEntityOperations_GetEntity_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubEntityOperations_GetEntity_Call) RunAndReturn(run func() params.ForgeEntity) *GithubEntityOperations_GetEntity_Call { - _c.Call.Return(run) - return _c -} - // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -286,35 +145,6 @@ func (_m *GithubEntityOperations) GetEntityHook(ctx context.Context, id int64) ( return r0, r1 } -// GithubEntityOperations_GetEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityHook' -type GithubEntityOperations_GetEntityHook_Call struct { - *mock.Call -} - -// GetEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubEntityOperations_Expecter) GetEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_GetEntityHook_Call { - return &GithubEntityOperations_GetEntityHook_Call{Call: _e.mock.On("GetEntityHook", ctx, id)} -} - -func (_c *GithubEntityOperations_GetEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_GetEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubEntityOperations_GetEntityHook_Call) Return(ret *github.Hook, err error) *GithubEntityOperations_GetEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubEntityOperations_GetEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Hook, error)) *GithubEntityOperations_GetEntityHook_Call { - _c.Call.Return(run) - return _c -} - // GetEntityJITConfig provides a mock function with given fields: ctx, instance, pool, labels func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (map[string]string, *github.Runner, error) { ret := _m.Called(ctx, instance, pool, labels) @@ -354,141 +184,6 @@ func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instan return r0, r1, r2 } -// GithubEntityOperations_GetEntityJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityJITConfig' -type GithubEntityOperations_GetEntityJITConfig_Call struct { - *mock.Call -} - -// GetEntityJITConfig is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - pool params.Pool -// - labels []string -func (_e *GithubEntityOperations_Expecter) GetEntityJITConfig(ctx interface{}, instance interface{}, pool interface{}, labels interface{}) *GithubEntityOperations_GetEntityJITConfig_Call { - return &GithubEntityOperations_GetEntityJITConfig_Call{Call: _e.mock.On("GetEntityJITConfig", ctx, instance, pool, labels)} -} - -func (_c *GithubEntityOperations_GetEntityJITConfig_Call) Run(run func(ctx context.Context, instance string, pool params.Pool, labels []string)) *GithubEntityOperations_GetEntityJITConfig_Call 
{ - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(params.Pool), args[3].([]string)) - }) - return _c -} - -func (_c *GithubEntityOperations_GetEntityJITConfig_Call) Return(jitConfigMap map[string]string, runner *github.Runner, err error) *GithubEntityOperations_GetEntityJITConfig_Call { - _c.Call.Return(jitConfigMap, runner, err) - return _c -} - -func (_c *GithubEntityOperations_GetEntityJITConfig_Call) RunAndReturn(run func(context.Context, string, params.Pool, []string) (map[string]string, *github.Runner, error)) *GithubEntityOperations_GetEntityJITConfig_Call { - _c.Call.Return(run) - return _c -} - -// GetEntityRunnerGroupIDByName provides a mock function with given fields: ctx, runnerGroupName -func (_m *GithubEntityOperations) GetEntityRunnerGroupIDByName(ctx context.Context, runnerGroupName string) (int64, error) { - ret := _m.Called(ctx, runnerGroupName) - - if len(ret) == 0 { - panic("no return value specified for GetEntityRunnerGroupIDByName") - } - - var r0 int64 - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (int64, error)); ok { - return rf(ctx, runnerGroupName) - } - if rf, ok := ret.Get(0).(func(context.Context, string) int64); ok { - r0 = rf(ctx, runnerGroupName) - } else { - r0 = ret.Get(0).(int64) - } - - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, runnerGroupName) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GithubEntityOperations_GetEntityRunnerGroupIDByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityRunnerGroupIDByName' -type GithubEntityOperations_GetEntityRunnerGroupIDByName_Call struct { - *mock.Call -} - -// GetEntityRunnerGroupIDByName is a helper method to define mock.On call -// - ctx context.Context -// - runnerGroupName string -func (_e *GithubEntityOperations_Expecter) GetEntityRunnerGroupIDByName(ctx interface{}, runnerGroupName interface{}) *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call { - return &GithubEntityOperations_GetEntityRunnerGroupIDByName_Call{Call: _e.mock.On("GetEntityRunnerGroupIDByName", ctx, runnerGroupName)} -} - -func (_c *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call) Run(run func(ctx context.Context, runnerGroupName string)) *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) - }) - return _c -} - -func (_c *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call) Return(_a0 int64, _a1 error) *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call) RunAndReturn(run func(context.Context, string) (int64, error)) *GithubEntityOperations_GetEntityRunnerGroupIDByName_Call { - _c.Call.Return(run) - return _c -} - -// GithubBaseURL provides a mock function with no fields -func (_m *GithubEntityOperations) GithubBaseURL() *url.URL { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GithubBaseURL") - } - - var r0 *url.URL - if rf, ok := ret.Get(0).(func() *url.URL); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*url.URL) - } - } - - return r0 -} - -// GithubEntityOperations_GithubBaseURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubBaseURL' -type GithubEntityOperations_GithubBaseURL_Call struct { - *mock.Call 
-} - -// GithubBaseURL is a helper method to define mock.On call -func (_e *GithubEntityOperations_Expecter) GithubBaseURL() *GithubEntityOperations_GithubBaseURL_Call { - return &GithubEntityOperations_GithubBaseURL_Call{Call: _e.mock.On("GithubBaseURL")} -} - -func (_c *GithubEntityOperations_GithubBaseURL_Call) Run(run func()) *GithubEntityOperations_GithubBaseURL_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *GithubEntityOperations_GithubBaseURL_Call) Return(_a0 *url.URL) *GithubEntityOperations_GithubBaseURL_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubEntityOperations_GithubBaseURL_Call) RunAndReturn(run func() *url.URL) *GithubEntityOperations_GithubBaseURL_Call { - _c.Call.Return(run) - return _c -} - // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubEntityOperations) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -528,35 +223,6 @@ func (_m *GithubEntityOperations) ListEntityHooks(ctx context.Context, opts *git return r0, r1, r2 } -// GithubEntityOperations_ListEntityHooks_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityHooks' -type GithubEntityOperations_ListEntityHooks_Call struct { - *mock.Call -} - -// ListEntityHooks is a helper method to define mock.On call -// - ctx context.Context -// - opts *github.ListOptions -func (_e *GithubEntityOperations_Expecter) ListEntityHooks(ctx interface{}, opts interface{}) *GithubEntityOperations_ListEntityHooks_Call { - return &GithubEntityOperations_ListEntityHooks_Call{Call: _e.mock.On("ListEntityHooks", ctx, opts)} -} - -func (_c *GithubEntityOperations_ListEntityHooks_Call) Run(run func(ctx context.Context, opts *github.ListOptions)) *GithubEntityOperations_ListEntityHooks_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.ListOptions)) - }) - return _c -} - -func (_c *GithubEntityOperations_ListEntityHooks_Call) Return(ret []*github.Hook, response *github.Response, err error) *GithubEntityOperations_ListEntityHooks_Call { - _c.Call.Return(ret, response, err) - return _c -} - -func (_c *GithubEntityOperations_ListEntityHooks_Call) RunAndReturn(run func(context.Context, *github.ListOptions) ([]*github.Hook, *github.Response, error)) *GithubEntityOperations_ListEntityHooks_Call { - _c.Call.Return(run) - return _c -} - // ListEntityRunnerApplicationDownloads provides a mock function with given fields: ctx func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { ret := _m.Called(ctx) @@ -596,34 +262,6 @@ func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx conte return r0, r1, r2 } -// GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunnerApplicationDownloads' -type GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call struct { - *mock.Call -} - -// ListEntityRunnerApplicationDownloads is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubEntityOperations_Expecter) ListEntityRunnerApplicationDownloads(ctx interface{}) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { - return &GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call{Call: 
_e.mock.On("ListEntityRunnerApplicationDownloads", ctx)} -} - -func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) Return(_a0 []*github.RunnerApplicationDownload, _a1 *github.Response, _a2 error) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) RunAndReturn(run func(context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error)) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { - _c.Call.Return(run) - return _c -} - // ListEntityRunners provides a mock function with given fields: ctx, opts func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -663,35 +301,6 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g return r0, r1, r2 } -// GithubEntityOperations_ListEntityRunners_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunners' -type GithubEntityOperations_ListEntityRunners_Call struct { - *mock.Call -} - -// ListEntityRunners is a helper method to define mock.On call -// - ctx context.Context -// - opts *github.ListRunnersOptions -func (_e *GithubEntityOperations_Expecter) ListEntityRunners(ctx interface{}, opts interface{}) *GithubEntityOperations_ListEntityRunners_Call { - return &GithubEntityOperations_ListEntityRunners_Call{Call: _e.mock.On("ListEntityRunners", ctx, opts)} -} - -func (_c *GithubEntityOperations_ListEntityRunners_Call) Run(run func(ctx context.Context, opts *github.ListRunnersOptions)) *GithubEntityOperations_ListEntityRunners_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*github.ListRunnersOptions)) - }) - return _c -} - -func (_c *GithubEntityOperations_ListEntityRunners_Call) Return(_a0 *github.Runners, _a1 *github.Response, _a2 error) *GithubEntityOperations_ListEntityRunners_Call { - _c.Call.Return(_a0, _a1, _a2) - return _c -} - -func (_c *GithubEntityOperations_ListEntityRunners_Call) RunAndReturn(run func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)) *GithubEntityOperations_ListEntityRunners_Call { - _c.Call.Return(run) - return _c -} - // PingEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -722,138 +331,34 @@ func (_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) return r0, r1 } -// GithubEntityOperations_PingEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PingEntityHook' -type GithubEntityOperations_PingEntityHook_Call struct { - *mock.Call -} - -// PingEntityHook is a helper method to define mock.On call -// - ctx context.Context -// - id int64 -func (_e *GithubEntityOperations_Expecter) PingEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_PingEntityHook_Call { - return &GithubEntityOperations_PingEntityHook_Call{Call: 
_e.mock.On("PingEntityHook", ctx, id)} -} - -func (_c *GithubEntityOperations_PingEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_PingEntityHook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubEntityOperations_PingEntityHook_Call) Return(ret *github.Response, err error) *GithubEntityOperations_PingEntityHook_Call { - _c.Call.Return(ret, err) - return _c -} - -func (_c *GithubEntityOperations_PingEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubEntityOperations_PingEntityHook_Call { - _c.Call.Return(run) - return _c -} - -// RateLimit provides a mock function with given fields: ctx -func (_m *GithubEntityOperations) RateLimit(ctx context.Context) (*github.RateLimits, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for RateLimit") - } - - var r0 *github.RateLimits - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*github.RateLimits) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GithubEntityOperations_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' -type GithubEntityOperations_RateLimit_Call struct { - *mock.Call -} - -// RateLimit is a helper method to define mock.On call -// - ctx context.Context -func (_e *GithubEntityOperations_Expecter) RateLimit(ctx interface{}) *GithubEntityOperations_RateLimit_Call { - return &GithubEntityOperations_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} -} - -func (_c *GithubEntityOperations_RateLimit_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_RateLimit_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *GithubEntityOperations_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *GithubEntityOperations_RateLimit_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *GithubEntityOperations_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *GithubEntityOperations_RateLimit_Call { - _c.Call.Return(run) - return _c -} - // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID -func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) error { +func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { ret := _m.Called(ctx, runnerID) if len(ret) == 0 { panic("no return value specified for RemoveEntityRunner") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { + var r0 *github.Response + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (*github.Response, error)); ok { + return rf(ctx, runnerID) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) *github.Response); ok { r0 = rf(ctx, runnerID) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*github.Response) + } } - return r0 -} + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, runnerID) + } else { + r1 = ret.Error(1) + } -// 
GithubEntityOperations_RemoveEntityRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveEntityRunner' -type GithubEntityOperations_RemoveEntityRunner_Call struct { - *mock.Call -} - -// RemoveEntityRunner is a helper method to define mock.On call -// - ctx context.Context -// - runnerID int64 -func (_e *GithubEntityOperations_Expecter) RemoveEntityRunner(ctx interface{}, runnerID interface{}) *GithubEntityOperations_RemoveEntityRunner_Call { - return &GithubEntityOperations_RemoveEntityRunner_Call{Call: _e.mock.On("RemoveEntityRunner", ctx, runnerID)} -} - -func (_c *GithubEntityOperations_RemoveEntityRunner_Call) Run(run func(ctx context.Context, runnerID int64)) *GithubEntityOperations_RemoveEntityRunner_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(int64)) - }) - return _c -} - -func (_c *GithubEntityOperations_RemoveEntityRunner_Call) Return(_a0 error) *GithubEntityOperations_RemoveEntityRunner_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *GithubEntityOperations_RemoveEntityRunner_Call) RunAndReturn(run func(context.Context, int64) error) *GithubEntityOperations_RemoveEntityRunner_Call { - _c.Call.Return(run) - return _c + return r0, r1 } // NewGithubEntityOperations creates a new instance of GithubEntityOperations. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. diff --git a/runner/common/mocks/PoolManager.go b/runner/common/mocks/PoolManager.go index a1a62f4f..5bb16672 100644 --- a/runner/common/mocks/PoolManager.go +++ b/runner/common/mocks/PoolManager.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -14,12 +14,22 @@ type PoolManager struct { mock.Mock } -type PoolManager_Expecter struct { - mock *mock.Mock -} +// DeleteRunner provides a mock function with given fields: runner, forceRemove, bypassGHUnauthorizedError +func (_m *PoolManager) DeleteRunner(runner params.Instance, forceRemove bool, bypassGHUnauthorizedError bool) error { + ret := _m.Called(runner, forceRemove, bypassGHUnauthorizedError) -func (_m *PoolManager) EXPECT() *PoolManager_Expecter { - return &PoolManager_Expecter{mock: &_m.Mock} + if len(ret) == 0 { + panic("no return value specified for DeleteRunner") + } + + var r0 error + if rf, ok := ret.Get(0).(func(params.Instance, bool, bool) error); ok { + r0 = rf(runner, forceRemove, bypassGHUnauthorizedError) + } else { + r0 = ret.Error(0) + } + + return r0 } // GetWebhookInfo provides a mock function with given fields: ctx @@ -50,34 +60,6 @@ func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, err return r0, r1 } -// PoolManager_GetWebhookInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWebhookInfo' -type PoolManager_GetWebhookInfo_Call struct { - *mock.Call -} - -// GetWebhookInfo is a helper method to define mock.On call -// - ctx context.Context -func (_e *PoolManager_Expecter) GetWebhookInfo(ctx interface{}) *PoolManager_GetWebhookInfo_Call { - return &PoolManager_GetWebhookInfo_Call{Call: _e.mock.On("GetWebhookInfo", ctx)} -} - -func (_c *PoolManager_GetWebhookInfo_Call) Run(run func(ctx context.Context)) *PoolManager_GetWebhookInfo_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *PoolManager_GetWebhookInfo_Call) Return(_a0 params.HookInfo, _a1 error) 
*PoolManager_GetWebhookInfo_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManager_GetWebhookInfo_Call) RunAndReturn(run func(context.Context) (params.HookInfo, error)) *PoolManager_GetWebhookInfo_Call { - _c.Call.Return(run) - return _c -} - // GithubRunnerRegistrationToken provides a mock function with no fields func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { ret := _m.Called() @@ -106,33 +88,6 @@ func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { return r0, r1 } -// PoolManager_GithubRunnerRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubRunnerRegistrationToken' -type PoolManager_GithubRunnerRegistrationToken_Call struct { - *mock.Call -} - -// GithubRunnerRegistrationToken is a helper method to define mock.On call -func (_e *PoolManager_Expecter) GithubRunnerRegistrationToken() *PoolManager_GithubRunnerRegistrationToken_Call { - return &PoolManager_GithubRunnerRegistrationToken_Call{Call: _e.mock.On("GithubRunnerRegistrationToken")} -} - -func (_c *PoolManager_GithubRunnerRegistrationToken_Call) Run(run func()) *PoolManager_GithubRunnerRegistrationToken_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_GithubRunnerRegistrationToken_Call) Return(_a0 string, _a1 error) *PoolManager_GithubRunnerRegistrationToken_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManager_GithubRunnerRegistrationToken_Call) RunAndReturn(run func() (string, error)) *PoolManager_GithubRunnerRegistrationToken_Call { - _c.Call.Return(run) - return _c -} - // HandleWorkflowJob provides a mock function with given fields: job func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { ret := _m.Called(job) @@ -151,34 +106,6 @@ func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return r0 } -// PoolManager_HandleWorkflowJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HandleWorkflowJob' -type PoolManager_HandleWorkflowJob_Call struct { - *mock.Call -} - -// HandleWorkflowJob is a helper method to define mock.On call -// - job params.WorkflowJob -func (_e *PoolManager_Expecter) HandleWorkflowJob(job interface{}) *PoolManager_HandleWorkflowJob_Call { - return &PoolManager_HandleWorkflowJob_Call{Call: _e.mock.On("HandleWorkflowJob", job)} -} - -func (_c *PoolManager_HandleWorkflowJob_Call) Run(run func(job params.WorkflowJob)) *PoolManager_HandleWorkflowJob_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.WorkflowJob)) - }) - return _c -} - -func (_c *PoolManager_HandleWorkflowJob_Call) Return(_a0 error) *PoolManager_HandleWorkflowJob_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_HandleWorkflowJob_Call) RunAndReturn(run func(params.WorkflowJob) error) *PoolManager_HandleWorkflowJob_Call { - _c.Call.Return(run) - return _c -} - // ID provides a mock function with no fields func (_m *PoolManager) ID() string { ret := _m.Called() @@ -197,33 +124,6 @@ func (_m *PoolManager) ID() string { return r0 } -// PoolManager_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID' -type PoolManager_ID_Call struct { - *mock.Call -} - -// ID is a helper method to define mock.On call -func (_e *PoolManager_Expecter) ID() *PoolManager_ID_Call { - return &PoolManager_ID_Call{Call: _e.mock.On("ID")} -} - -func (_c *PoolManager_ID_Call) Run(run func()) *PoolManager_ID_Call { - 
_c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_ID_Call) Return(_a0 string) *PoolManager_ID_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_ID_Call) RunAndReturn(run func() string) *PoolManager_ID_Call { - _c.Call.Return(run) - return _c -} - // InstallWebhook provides a mock function with given fields: ctx, param func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) { ret := _m.Called(ctx, param) @@ -252,35 +152,6 @@ func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallW return r0, r1 } -// PoolManager_InstallWebhook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InstallWebhook' -type PoolManager_InstallWebhook_Call struct { - *mock.Call -} - -// InstallWebhook is a helper method to define mock.On call -// - ctx context.Context -// - param params.InstallWebhookParams -func (_e *PoolManager_Expecter) InstallWebhook(ctx interface{}, param interface{}) *PoolManager_InstallWebhook_Call { - return &PoolManager_InstallWebhook_Call{Call: _e.mock.On("InstallWebhook", ctx, param)} -} - -func (_c *PoolManager_InstallWebhook_Call) Run(run func(ctx context.Context, param params.InstallWebhookParams)) *PoolManager_InstallWebhook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.InstallWebhookParams)) - }) - return _c -} - -func (_c *PoolManager_InstallWebhook_Call) Return(_a0 params.HookInfo, _a1 error) *PoolManager_InstallWebhook_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManager_InstallWebhook_Call) RunAndReturn(run func(context.Context, params.InstallWebhookParams) (params.HookInfo, error)) *PoolManager_InstallWebhook_Call { - _c.Call.Return(run) - return _c -} - // RootCABundle provides a mock function with no fields func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { ret := _m.Called() @@ -309,67 +180,6 @@ func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { return r0, r1 } -// PoolManager_RootCABundle_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RootCABundle' -type PoolManager_RootCABundle_Call struct { - *mock.Call -} - -// RootCABundle is a helper method to define mock.On call -func (_e *PoolManager_Expecter) RootCABundle() *PoolManager_RootCABundle_Call { - return &PoolManager_RootCABundle_Call{Call: _e.mock.On("RootCABundle")} -} - -func (_c *PoolManager_RootCABundle_Call) Run(run func()) *PoolManager_RootCABundle_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_RootCABundle_Call) Return(_a0 params.CertificateBundle, _a1 error) *PoolManager_RootCABundle_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManager_RootCABundle_Call) RunAndReturn(run func() (params.CertificateBundle, error)) *PoolManager_RootCABundle_Call { - _c.Call.Return(run) - return _c -} - -// SetPoolRunningState provides a mock function with given fields: isRunning, failureReason -func (_m *PoolManager) SetPoolRunningState(isRunning bool, failureReason string) { - _m.Called(isRunning, failureReason) -} - -// PoolManager_SetPoolRunningState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetPoolRunningState' -type PoolManager_SetPoolRunningState_Call struct { - *mock.Call -} - -// SetPoolRunningState is a helper method to define mock.On call -// - isRunning 
bool -// - failureReason string -func (_e *PoolManager_Expecter) SetPoolRunningState(isRunning interface{}, failureReason interface{}) *PoolManager_SetPoolRunningState_Call { - return &PoolManager_SetPoolRunningState_Call{Call: _e.mock.On("SetPoolRunningState", isRunning, failureReason)} -} - -func (_c *PoolManager_SetPoolRunningState_Call) Run(run func(isRunning bool, failureReason string)) *PoolManager_SetPoolRunningState_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(bool), args[1].(string)) - }) - return _c -} - -func (_c *PoolManager_SetPoolRunningState_Call) Return() *PoolManager_SetPoolRunningState_Call { - _c.Call.Return() - return _c -} - -func (_c *PoolManager_SetPoolRunningState_Call) RunAndReturn(run func(bool, string)) *PoolManager_SetPoolRunningState_Call { - _c.Run(run) - return _c -} - // Start provides a mock function with no fields func (_m *PoolManager) Start() error { ret := _m.Called() @@ -388,33 +198,6 @@ func (_m *PoolManager) Start() error { return r0 } -// PoolManager_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' -type PoolManager_Start_Call struct { - *mock.Call -} - -// Start is a helper method to define mock.On call -func (_e *PoolManager_Expecter) Start() *PoolManager_Start_Call { - return &PoolManager_Start_Call{Call: _e.mock.On("Start")} -} - -func (_c *PoolManager_Start_Call) Run(run func()) *PoolManager_Start_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_Start_Call) Return(_a0 error) *PoolManager_Start_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_Start_Call) RunAndReturn(run func() error) *PoolManager_Start_Call { - _c.Call.Return(run) - return _c -} - // Status provides a mock function with no fields func (_m *PoolManager) Status() params.PoolManagerStatus { ret := _m.Called() @@ -433,33 +216,6 @@ func (_m *PoolManager) Status() params.PoolManagerStatus { return r0 } -// PoolManager_Status_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Status' -type PoolManager_Status_Call struct { - *mock.Call -} - -// Status is a helper method to define mock.On call -func (_e *PoolManager_Expecter) Status() *PoolManager_Status_Call { - return &PoolManager_Status_Call{Call: _e.mock.On("Status")} -} - -func (_c *PoolManager_Status_Call) Run(run func()) *PoolManager_Status_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_Status_Call) Return(_a0 params.PoolManagerStatus) *PoolManager_Status_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_Status_Call) RunAndReturn(run func() params.PoolManagerStatus) *PoolManager_Status_Call { - _c.Call.Return(run) - return _c -} - // Stop provides a mock function with no fields func (_m *PoolManager) Stop() error { ret := _m.Called() @@ -478,33 +234,6 @@ func (_m *PoolManager) Stop() error { return r0 } -// PoolManager_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' -type PoolManager_Stop_Call struct { - *mock.Call -} - -// Stop is a helper method to define mock.On call -func (_e *PoolManager_Expecter) Stop() *PoolManager_Stop_Call { - return &PoolManager_Stop_Call{Call: _e.mock.On("Stop")} -} - -func (_c *PoolManager_Stop_Call) Run(run func()) *PoolManager_Stop_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_Stop_Call) Return(_a0 error) *PoolManager_Stop_Call { - 
_c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_Stop_Call) RunAndReturn(run func() error) *PoolManager_Stop_Call { - _c.Call.Return(run) - return _c -} - // UninstallWebhook provides a mock function with given fields: ctx func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { ret := _m.Called(ctx) @@ -523,34 +252,6 @@ func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { return r0 } -// PoolManager_UninstallWebhook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UninstallWebhook' -type PoolManager_UninstallWebhook_Call struct { - *mock.Call -} - -// UninstallWebhook is a helper method to define mock.On call -// - ctx context.Context -func (_e *PoolManager_Expecter) UninstallWebhook(ctx interface{}) *PoolManager_UninstallWebhook_Call { - return &PoolManager_UninstallWebhook_Call{Call: _e.mock.On("UninstallWebhook", ctx)} -} - -func (_c *PoolManager_UninstallWebhook_Call) Run(run func(ctx context.Context)) *PoolManager_UninstallWebhook_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *PoolManager_UninstallWebhook_Call) Return(_a0 error) *PoolManager_UninstallWebhook_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_UninstallWebhook_Call) RunAndReturn(run func(context.Context) error) *PoolManager_UninstallWebhook_Call { - _c.Call.Return(run) - return _c -} - // Wait provides a mock function with no fields func (_m *PoolManager) Wait() error { ret := _m.Called() @@ -569,33 +270,6 @@ func (_m *PoolManager) Wait() error { return r0 } -// PoolManager_Wait_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Wait' -type PoolManager_Wait_Call struct { - *mock.Call -} - -// Wait is a helper method to define mock.On call -func (_e *PoolManager_Expecter) Wait() *PoolManager_Wait_Call { - return &PoolManager_Wait_Call{Call: _e.mock.On("Wait")} -} - -func (_c *PoolManager_Wait_Call) Run(run func()) *PoolManager_Wait_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_Wait_Call) Return(_a0 error) *PoolManager_Wait_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_Wait_Call) RunAndReturn(run func() error) *PoolManager_Wait_Call { - _c.Call.Return(run) - return _c -} - // WebhookSecret provides a mock function with no fields func (_m *PoolManager) WebhookSecret() string { ret := _m.Called() @@ -614,33 +288,6 @@ func (_m *PoolManager) WebhookSecret() string { return r0 } -// PoolManager_WebhookSecret_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WebhookSecret' -type PoolManager_WebhookSecret_Call struct { - *mock.Call -} - -// WebhookSecret is a helper method to define mock.On call -func (_e *PoolManager_Expecter) WebhookSecret() *PoolManager_WebhookSecret_Call { - return &PoolManager_WebhookSecret_Call{Call: _e.mock.On("WebhookSecret")} -} - -func (_c *PoolManager_WebhookSecret_Call) Run(run func()) *PoolManager_WebhookSecret_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManager_WebhookSecret_Call) Return(_a0 string) *PoolManager_WebhookSecret_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManager_WebhookSecret_Call) RunAndReturn(run func() string) *PoolManager_WebhookSecret_Call { - _c.Call.Return(run) - return _c -} - // NewPoolManager creates a new instance of PoolManager. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewPoolManager(t interface { diff --git a/runner/common/mocks/Provider.go b/runner/common/mocks/Provider.go index 5bf94a10..e7491ac5 100644 --- a/runner/common/mocks/Provider.go +++ b/runner/common/mocks/Provider.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -19,14 +19,6 @@ type Provider struct { mock.Mock } -type Provider_Expecter struct { - mock *mock.Mock -} - -func (_m *Provider) EXPECT() *Provider_Expecter { - return &Provider_Expecter{mock: &_m.Mock} -} - // AsParams provides a mock function with no fields func (_m *Provider) AsParams() params.Provider { ret := _m.Called() @@ -45,33 +37,6 @@ func (_m *Provider) AsParams() params.Provider { return r0 } -// Provider_AsParams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AsParams' -type Provider_AsParams_Call struct { - *mock.Call -} - -// AsParams is a helper method to define mock.On call -func (_e *Provider_Expecter) AsParams() *Provider_AsParams_Call { - return &Provider_AsParams_Call{Call: _e.mock.On("AsParams")} -} - -func (_c *Provider_AsParams_Call) Run(run func()) *Provider_AsParams_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *Provider_AsParams_Call) Return(_a0 params.Provider) *Provider_AsParams_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_AsParams_Call) RunAndReturn(run func() params.Provider) *Provider_AsParams_Call { - _c.Call.Return(run) - return _c -} - // CreateInstance provides a mock function with given fields: ctx, bootstrapParams, createInstanceParams func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, bootstrapParams, createInstanceParams) @@ -100,36 +65,6 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro return r0, r1 } -// Provider_CreateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateInstance' -type Provider_CreateInstance_Call struct { - *mock.Call -} - -// CreateInstance is a helper method to define mock.On call -// - ctx context.Context -// - bootstrapParams garm_provider_commonparams.BootstrapInstance -// - createInstanceParams common.CreateInstanceParams -func (_e *Provider_Expecter) CreateInstance(ctx interface{}, bootstrapParams interface{}, createInstanceParams interface{}) *Provider_CreateInstance_Call { - return &Provider_CreateInstance_Call{Call: _e.mock.On("CreateInstance", ctx, bootstrapParams, createInstanceParams)} -} - -func (_c *Provider_CreateInstance_Call) Run(run func(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams)) *Provider_CreateInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(garm_provider_commonparams.BootstrapInstance), args[2].(common.CreateInstanceParams)) - }) - return _c -} - -func (_c *Provider_CreateInstance_Call) Return(_a0 garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_CreateInstance_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Provider_CreateInstance_Call) RunAndReturn(run 
func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error)) *Provider_CreateInstance_Call { - _c.Call.Return(run) - return _c -} - // DeleteInstance provides a mock function with given fields: ctx, instance, deleteInstanceParams func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams) error { ret := _m.Called(ctx, instance, deleteInstanceParams) @@ -148,36 +83,6 @@ func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteI return r0 } -// Provider_DeleteInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstance' -type Provider_DeleteInstance_Call struct { - *mock.Call -} - -// DeleteInstance is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - deleteInstanceParams common.DeleteInstanceParams -func (_e *Provider_Expecter) DeleteInstance(ctx interface{}, instance interface{}, deleteInstanceParams interface{}) *Provider_DeleteInstance_Call { - return &Provider_DeleteInstance_Call{Call: _e.mock.On("DeleteInstance", ctx, instance, deleteInstanceParams)} -} - -func (_c *Provider_DeleteInstance_Call) Run(run func(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams)) *Provider_DeleteInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(common.DeleteInstanceParams)) - }) - return _c -} - -func (_c *Provider_DeleteInstance_Call) Return(_a0 error) *Provider_DeleteInstance_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_DeleteInstance_Call) RunAndReturn(run func(context.Context, string, common.DeleteInstanceParams) error) *Provider_DeleteInstance_Call { - _c.Call.Return(run) - return _c -} - // DisableJITConfig provides a mock function with no fields func (_m *Provider) DisableJITConfig() bool { ret := _m.Called() @@ -196,33 +101,6 @@ func (_m *Provider) DisableJITConfig() bool { return r0 } -// Provider_DisableJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DisableJITConfig' -type Provider_DisableJITConfig_Call struct { - *mock.Call -} - -// DisableJITConfig is a helper method to define mock.On call -func (_e *Provider_Expecter) DisableJITConfig() *Provider_DisableJITConfig_Call { - return &Provider_DisableJITConfig_Call{Call: _e.mock.On("DisableJITConfig")} -} - -func (_c *Provider_DisableJITConfig_Call) Run(run func()) *Provider_DisableJITConfig_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *Provider_DisableJITConfig_Call) Return(_a0 bool) *Provider_DisableJITConfig_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_DisableJITConfig_Call) RunAndReturn(run func() bool) *Provider_DisableJITConfig_Call { - _c.Call.Return(run) - return _c -} - // GetInstance provides a mock function with given fields: ctx, instance, getInstanceParams func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, instance, getInstanceParams) @@ -251,36 +129,6 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc return r0, r1 } -// Provider_GetInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInstance' -type 
Provider_GetInstance_Call struct { - *mock.Call -} - -// GetInstance is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - getInstanceParams common.GetInstanceParams -func (_e *Provider_Expecter) GetInstance(ctx interface{}, instance interface{}, getInstanceParams interface{}) *Provider_GetInstance_Call { - return &Provider_GetInstance_Call{Call: _e.mock.On("GetInstance", ctx, instance, getInstanceParams)} -} - -func (_c *Provider_GetInstance_Call) Run(run func(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams)) *Provider_GetInstance_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(common.GetInstanceParams)) - }) - return _c -} - -func (_c *Provider_GetInstance_Call) Return(_a0 garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_GetInstance_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Provider_GetInstance_Call) RunAndReturn(run func(context.Context, string, common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error)) *Provider_GetInstance_Call { - _c.Call.Return(run) - return _c -} - // ListInstances provides a mock function with given fields: ctx, poolID, listInstancesParams func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, poolID, listInstancesParams) @@ -311,36 +159,6 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan return r0, r1 } -// Provider_ListInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListInstances' -type Provider_ListInstances_Call struct { - *mock.Call -} - -// ListInstances is a helper method to define mock.On call -// - ctx context.Context -// - poolID string -// - listInstancesParams common.ListInstancesParams -func (_e *Provider_Expecter) ListInstances(ctx interface{}, poolID interface{}, listInstancesParams interface{}) *Provider_ListInstances_Call { - return &Provider_ListInstances_Call{Call: _e.mock.On("ListInstances", ctx, poolID, listInstancesParams)} -} - -func (_c *Provider_ListInstances_Call) Run(run func(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams)) *Provider_ListInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(common.ListInstancesParams)) - }) - return _c -} - -func (_c *Provider_ListInstances_Call) Return(_a0 []garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_ListInstances_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Provider_ListInstances_Call) RunAndReturn(run func(context.Context, string, common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error)) *Provider_ListInstances_Call { - _c.Call.Return(run) - return _c -} - // RemoveAllInstances provides a mock function with given fields: ctx, removeAllInstancesParams func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams) error { ret := _m.Called(ctx, removeAllInstancesParams) @@ -359,35 +177,6 @@ func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesPa return r0 } -// Provider_RemoveAllInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveAllInstances' -type Provider_RemoveAllInstances_Call struct { 
- *mock.Call -} - -// RemoveAllInstances is a helper method to define mock.On call -// - ctx context.Context -// - removeAllInstancesParams common.RemoveAllInstancesParams -func (_e *Provider_Expecter) RemoveAllInstances(ctx interface{}, removeAllInstancesParams interface{}) *Provider_RemoveAllInstances_Call { - return &Provider_RemoveAllInstances_Call{Call: _e.mock.On("RemoveAllInstances", ctx, removeAllInstancesParams)} -} - -func (_c *Provider_RemoveAllInstances_Call) Run(run func(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams)) *Provider_RemoveAllInstances_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(common.RemoveAllInstancesParams)) - }) - return _c -} - -func (_c *Provider_RemoveAllInstances_Call) Return(_a0 error) *Provider_RemoveAllInstances_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_RemoveAllInstances_Call) RunAndReturn(run func(context.Context, common.RemoveAllInstancesParams) error) *Provider_RemoveAllInstances_Call { - _c.Call.Return(run) - return _c -} - // Start provides a mock function with given fields: ctx, instance, startParams func (_m *Provider) Start(ctx context.Context, instance string, startParams common.StartParams) error { ret := _m.Called(ctx, instance, startParams) @@ -406,36 +195,6 @@ func (_m *Provider) Start(ctx context.Context, instance string, startParams comm return r0 } -// Provider_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' -type Provider_Start_Call struct { - *mock.Call -} - -// Start is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - startParams common.StartParams -func (_e *Provider_Expecter) Start(ctx interface{}, instance interface{}, startParams interface{}) *Provider_Start_Call { - return &Provider_Start_Call{Call: _e.mock.On("Start", ctx, instance, startParams)} -} - -func (_c *Provider_Start_Call) Run(run func(ctx context.Context, instance string, startParams common.StartParams)) *Provider_Start_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(common.StartParams)) - }) - return _c -} - -func (_c *Provider_Start_Call) Return(_a0 error) *Provider_Start_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_Start_Call) RunAndReturn(run func(context.Context, string, common.StartParams) error) *Provider_Start_Call { - _c.Call.Return(run) - return _c -} - // Stop provides a mock function with given fields: ctx, instance, stopParams func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common.StopParams) error { ret := _m.Called(ctx, instance, stopParams) @@ -454,36 +213,6 @@ func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common return r0 } -// Provider_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' -type Provider_Stop_Call struct { - *mock.Call -} - -// Stop is a helper method to define mock.On call -// - ctx context.Context -// - instance string -// - stopParams common.StopParams -func (_e *Provider_Expecter) Stop(ctx interface{}, instance interface{}, stopParams interface{}) *Provider_Stop_Call { - return &Provider_Stop_Call{Call: _e.mock.On("Stop", ctx, instance, stopParams)} -} - -func (_c *Provider_Stop_Call) Run(run func(ctx context.Context, instance string, stopParams common.StopParams)) *Provider_Stop_Call { - _c.Call.Run(func(args mock.Arguments) { - 
run(args[0].(context.Context), args[1].(string), args[2].(common.StopParams)) - }) - return _c -} - -func (_c *Provider_Stop_Call) Return(_a0 error) *Provider_Stop_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Provider_Stop_Call) RunAndReturn(run func(context.Context, string, common.StopParams) error) *Provider_Stop_Call { - _c.Call.Return(run) - return _c -} - // NewProvider creates a new instance of Provider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewProvider(t interface { diff --git a/runner/common/mocks/RateLimitClient.go b/runner/common/mocks/RateLimitClient.go deleted file mode 100644 index b7e52f71..00000000 --- a/runner/common/mocks/RateLimitClient.go +++ /dev/null @@ -1,95 +0,0 @@ -// Code generated by mockery. DO NOT EDIT. - -package mocks - -import ( - context "context" - - github "github.com/google/go-github/v72/github" - mock "github.com/stretchr/testify/mock" -) - -// RateLimitClient is an autogenerated mock type for the RateLimitClient type -type RateLimitClient struct { - mock.Mock -} - -type RateLimitClient_Expecter struct { - mock *mock.Mock -} - -func (_m *RateLimitClient) EXPECT() *RateLimitClient_Expecter { - return &RateLimitClient_Expecter{mock: &_m.Mock} -} - -// RateLimit provides a mock function with given fields: ctx -func (_m *RateLimitClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { - ret := _m.Called(ctx) - - if len(ret) == 0 { - panic("no return value specified for RateLimit") - } - - var r0 *github.RateLimits - var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { - return rf(ctx) - } - if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*github.RateLimits) - } - } - - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// RateLimitClient_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' -type RateLimitClient_RateLimit_Call struct { - *mock.Call -} - -// RateLimit is a helper method to define mock.On call -// - ctx context.Context -func (_e *RateLimitClient_Expecter) RateLimit(ctx interface{}) *RateLimitClient_RateLimit_Call { - return &RateLimitClient_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} -} - -func (_c *RateLimitClient_RateLimit_Call) Run(run func(ctx context.Context)) *RateLimitClient_RateLimit_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) - }) - return _c -} - -func (_c *RateLimitClient_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *RateLimitClient_RateLimit_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *RateLimitClient_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *RateLimitClient_RateLimit_Call { - _c.Call.Return(run) - return _c -} - -// NewRateLimitClient creates a new instance of RateLimitClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
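The Provider mock hunks above regenerate the mock with a pinned mockery release (v2.53.3) and drop the typed expecter API — the `EXPECT()` accessor and the per-method `Provider_*_Call` helpers — reverting to testify's classic string-based expectations; the RateLimitClient mock is deleted outright because its interface disappears from `runner/common/util.go` further down. A minimal sketch of what that means for test authors, assuming only the package path visible in this diff (the test body is illustrative, not from the GARM tree):

```go
package runner_test

import (
	"testing"

	"github.com/cloudbase/garm/runner/common/mocks"
)

func TestProviderMockStyle(t *testing.T) {
	prov := new(mocks.Provider)

	// Classic testify style, as regenerated here: the method name is a string,
	// so a typo or an interface change only surfaces when the test runs.
	prov.On("DisableJITConfig").Return(false)

	if prov.DisableJITConfig() {
		t.Fatal("expected DisableJITConfig to return false")
	}

	// The removed expecter style was checked at compile time instead:
	//
	//	prov := mocks.NewProvider(t)
	//	prov.EXPECT().DisableJITConfig().Return(false)
	prov.AssertExpectations(t)
}
```

The trade-off is compile-time safety: with `On("...")`, a renamed interface method no longer breaks tests at build time.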
-func NewRateLimitClient(t interface { - mock.TestingT - Cleanup(func()) -}) *RateLimitClient { - mock := &RateLimitClient{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/runner/common/pool.go b/runner/common/pool.go index 4cb86a62..68a7ddf0 100644 --- a/runner/common/pool.go +++ b/runner/common/pool.go @@ -36,7 +36,7 @@ const ( BackoffTimer = 1 * time.Minute ) -//go:generate go run github.com/vektra/mockery/v2@latest +//go:generate mockery --all type PoolManager interface { // ID returns the ID of the entity (repo, org, enterprise) ID() string @@ -54,6 +54,13 @@ type PoolManager interface { // for it and call this function with the WorkflowJob as a parameter. HandleWorkflowJob(job params.WorkflowJob) error + // DeleteRunner will attempt to remove a runner from the pool. If forceRemove is true, any error + // received from the provider will be ignored and we will proceed to remove the runner from the database. + // An error received while attempting to remove from GitHub (other than 404) will still stop the deletion + // process. This can happen if the runner is already processing a job. At which point, you can simply cancel + // the job in github. Doing so will prompt GARM to reap the runner automatically. + DeleteRunner(runner params.Instance, forceRemove, bypassGHUnauthorizedError bool) error + // InstallWebhook will create a webhook in github for the entity associated with this pool manager. InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) // GetWebhookInfo will return information about the webhook installed in github for the entity associated @@ -67,8 +74,6 @@ type PoolManager interface { // may use internal or self signed certificates. RootCABundle() (params.CertificateBundle, error) - SetPoolRunningState(isRunning bool, failureReason string) - // Start will start the pool manager and all associated workers. Start() error // Stop will stop the pool manager and all associated workers. diff --git a/runner/common/provider.go b/runner/common/provider.go index a5d0db66..7454540f 100644 --- a/runner/common/provider.go +++ b/runner/common/provider.go @@ -21,7 +21,7 @@ import ( "github.com/cloudbase/garm/params" ) -//go:generate go run github.com/vektra/mockery/v2@latest +//go:generate mockery --all type Provider interface { // CreateInstance creates a new compute instance in the provider. CreateInstance(ctx context.Context, bootstrapParams commonParams.BootstrapInstance, createInstanceParams CreateInstanceParams) (commonParams.ProviderInstance, error) diff --git a/runner/common/util.go b/runner/common/util.go index 5130dcfd..37e9b60c 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -1,22 +1,7 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
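Two things happen in the `runner/common/pool.go` hunk above: the `//go:generate` directive moves from `go run github.com/vektra/mockery/v2@latest` (no local install needed, but always the latest release) to a bare `mockery --all`, which relies on whatever mockery binary is on PATH — consistent with the version-stamped `v2.53.3` headers in the regenerated files; and the PoolManager interface gains a documented `DeleteRunner` method while `SetPoolRunningState` is dropped. A hedged sketch of the `DeleteRunner` contract from a caller's perspective (the helper and its name are mine; only the method signature and its documented semantics come from the patch):

```go
package example

import (
	"fmt"

	"github.com/cloudbase/garm/params"
	"github.com/cloudbase/garm/runner/common"
)

// forceRemoveRunner is a hypothetical caller illustrating the DeleteRunner
// contract documented in the interface comment above.
func forceRemoveRunner(poolMgr common.PoolManager, runner params.Instance) error {
	// forceRemove=true: provider errors are ignored and the runner is removed
	// from the database anyway. bypassGHUnauthorizedError=false: an unauthorized
	// answer from GitHub still aborts the deletion.
	if err := poolMgr.DeleteRunner(runner, true, false); err != nil {
		return fmt.Errorf("deleting runner %s: %w", runner.Name, err)
	}
	return nil
}
```

Per the interface comment, cancelling the job in GitHub is the escape hatch when a runner is busy: GARM then reaps the runner automatically.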
- package common import ( "context" - "net/url" "github.com/google/go-github/v72/github" @@ -31,26 +16,15 @@ type GithubEntityOperations interface { PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) - RemoveEntityRunner(ctx context.Context, runnerID int64) error - RateLimit(ctx context.Context) (*github.RateLimits, error) + RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) - GetEntityRunnerGroupIDByName(ctx context.Context, runnerGroupName string) (int64, error) - - // GetEntity returns the GitHub entity for which the github client was instanciated. - GetEntity() params.ForgeEntity - // GithubBaseURL returns the base URL for the github or GHES API. - GithubBaseURL() *url.URL -} - -type RateLimitClient interface { - RateLimit(ctx context.Context) (*github.RateLimits, error) } // GithubClient that describes the minimum list of functions we need to interact with github. // Allows for easier testing. // -//go:generate go run github.com/vektra/mockery/v2@latest +//go:generate mockery --all type GithubClient interface { GithubEntityOperations diff --git a/runner/common_test.go b/runner/common_test.go index 247b5ab1..b9b53545 100644 --- a/runner/common_test.go +++ b/runner/common_test.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package runner const ( diff --git a/runner/enterprises.go b/runner/enterprises.go index 6b393abd..3e9e3b8c 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -1,26 +1,13 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
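In the `runner/common/util.go` hunk above, GithubEntityOperations is slimmed down: `RateLimit`, `GetEntityRunnerGroupIDByName`, `GetEntity`, and `GithubBaseURL` go away (along with the standalone RateLimitClient interface), and `RemoveEntityRunner` now returns the `*github.Response` alongside the error. A short illustrative wrapper, not from the GARM tree, showing why surfacing the response is useful:

```go
package example

import (
	"context"
	"net/http"

	"github.com/cloudbase/garm/runner/common"
)

// removeEntityRunner shows the benefit of the new return value: with the raw
// status code available, a 404 (runner already gone from GitHub) can be treated
// as success without matching on error strings.
func removeEntityRunner(ctx context.Context, cli common.GithubEntityOperations, runnerID int64) error {
	resp, err := cli.RemoveEntityRunner(ctx, runnerID)
	if err != nil {
		if resp != nil && resp.StatusCode == http.StatusNotFound {
			return nil // already removed; nothing left to do
		}
		return err
	}
	return nil
}
```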
- package runner import ( "context" - "errors" "fmt" "log/slog" "strings" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -35,7 +22,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp err = param.Validate() if err != nil { - return params.Enterprise{}, fmt.Errorf("error validating params: %w", err) + return params.Enterprise{}, errors.Wrap(err, "validating params") } creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) @@ -46,15 +33,15 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp _, err = r.store.GetEnterprise(ctx, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } } else { return params.Enterprise{}, runnerErrors.NewConflictError("enterprise %s already exists", param.Name) } - enterprise, err = r.store.CreateEnterprise(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) + enterprise, err = r.store.CreateEnterprise(ctx, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } defer func() { @@ -72,7 +59,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp var poolMgr common.PoolManager poolMgr, err = r.poolManagerCtrl.CreateEnterprisePoolManager(r.ctx, enterprise, r.providers, r.store) if err != nil { - return params.Enterprise{}, fmt.Errorf("error creating enterprise pool manager: %w", err) + return params.Enterprise{}, errors.Wrap(err, "creating enterprise pool manager") } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteEnterprisePoolManager(enterprise); deleteErr != nil { @@ -80,19 +67,19 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp ctx, "failed to cleanup pool manager for enterprise", "enterprise_id", enterprise.ID) } - return params.Enterprise{}, fmt.Errorf("error starting enterprise pool manager: %w", err) + return params.Enterprise{}, errors.Wrap(err, "starting enterprise pool manager") } return enterprise, nil } -func (r *Runner) ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { +func (r *Runner) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - enterprises, err := r.store.ListEnterprises(ctx, filter) + enterprises, err := r.store.ListEnterprises(ctx) if err != nil { - return nil, fmt.Errorf("error listing enterprises: %w", err) + return nil, errors.Wrap(err, "listing enterprises") } var allEnterprises []params.Enterprise @@ -118,7 +105,7 @@ func (r *Runner) GetEnterpriseByID(ctx context.Context, enterpriseID string) (pa enterprise, err := r.store.GetEnterpriseByID(ctx, enterpriseID) if err != nil { - return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } poolMgr, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) if err != nil { @@ -136,17 +123,17 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro enterprise, 
err := r.store.GetEnterpriseByID(ctx, enterpriseID) if err != nil { - return fmt.Errorf("error fetching enterprise: %w", err) + return errors.Wrap(err, "fetching enterprise") } entity, err := enterprise.GetEntity() if err != nil { - return fmt.Errorf("error getting entity: %w", err) + return errors.Wrap(err, "getting entity") } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return fmt.Errorf("error fetching enterprise pools: %w", err) + return errors.Wrap(err, "fetching enterprise pools") } if len(pools) > 0 { @@ -158,21 +145,12 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro return runnerErrors.NewBadRequestError("enterprise has pools defined (%s)", strings.Join(poolIDs, ", ")) } - scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) - if err != nil { - return fmt.Errorf("error fetching enterprise scale sets: %w", err) - } - - if len(scaleSets) > 0 { - return runnerErrors.NewBadRequestError("enterprise has scale sets defined; delete them first") - } - if err := r.poolManagerCtrl.DeleteEnterprisePoolManager(enterprise); err != nil { - return fmt.Errorf("error deleting enterprise pool manager: %w", err) + return errors.Wrap(err, "deleting enterprise pool manager") } if err := r.store.DeleteEnterprise(ctx, enterpriseID); err != nil { - return fmt.Errorf("error removing enterprise %s: %w", enterpriseID, err) + return errors.Wrapf(err, "removing enterprise %s", enterpriseID) } return nil } @@ -193,7 +171,7 @@ func (r *Runner) UpdateEnterprise(ctx context.Context, enterpriseID string, para enterprise, err := r.store.UpdateEnterprise(ctx, enterpriseID, param) if err != nil { - return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) + return params.Enterprise{}, errors.Wrap(err, "updating enterprise") } poolMgr, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) @@ -219,9 +197,9 @@ func (r *Runner) CreateEnterprisePool(ctx context.Context, enterpriseID string, param.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) @@ -236,13 +214,13 @@ func (r *Runner) GetEnterprisePoolByID(ctx context.Context, enterpriseID, poolID if !auth.IsAdmin(ctx) { return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } return pool, nil } @@ -252,14 +230,14 @@ func (r *Runner) DeleteEnterprisePool(ctx context.Context, enterpriseID, poolID return runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } // nolint:golangci-lint,godox @@ -273,7 +251,7 @@ func (r *Runner) DeleteEnterprisePool(ctx context.Context, enterpriseID, poolID } if err := r.store.DeleteEntityPool(ctx, 
entity, poolID); err != nil { - return fmt.Errorf("error deleting pool: %w", err) + return errors.Wrap(err, "deleting pool") } return nil } @@ -283,13 +261,13 @@ func (r *Runner) ListEnterprisePools(ctx context.Context, enterpriseID string) ( return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, fmt.Errorf("error fetching pools: %w", err) + return nil, errors.Wrap(err, "fetching pools") } return pools, nil } @@ -299,13 +277,13 @@ func (r *Runner) UpdateEnterprisePool(ctx context.Context, enterpriseID, poolID return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } maxRunners := pool.MaxRunners @@ -324,7 +302,7 @@ func (r *Runner) UpdateEnterprisePool(ctx context.Context, enterpriseID, poolID newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, fmt.Errorf("error updating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "updating pool") } return newPool, nil } @@ -333,13 +311,13 @@ func (r *Runner) ListEnterpriseInstances(ctx context.Context, enterpriseID strin if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: enterpriseID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) + return []params.Instance{}, errors.Wrap(err, "fetching instances") } return instances, nil } @@ -350,12 +328,12 @@ func (r *Runner) findEnterprisePoolManager(name, endpointName string) (common.Po enterprise, err := r.store.GetEnterprise(r.ctx, name, endpointName) if err != nil { - return nil, fmt.Errorf("error fetching enterprise: %w", err) + return nil, errors.Wrap(err, "fetching enterprise") } poolManager, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) if err != nil { - return nil, fmt.Errorf("error fetching pool manager for enterprise: %w", err) + return nil, errors.Wrap(err, "fetching pool manager for enterprise") } return poolManager, nil } diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go index 0724ccf9..94bc4807 100644 --- a/runner/enterprises_test.go +++ b/runner/enterprises_test.go @@ -16,10 +16,10 @@ package runner import ( "context" - "errors" "fmt" "testing" + "github.com/pkg/errors" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -39,7 +39,7 @@ type EnterpriseTestFixtures struct { Store dbCommon.Store StoreEnterprises map[string]params.Enterprise Providers map[string]common.Provider - Credentials map[string]params.ForgeCredentials + Credentials map[string]params.GithubCredentials CreateEnterpriseParams params.CreateEnterpriseParams CreatePoolParams params.CreatePoolParams CreateInstanceParams params.CreateInstanceParams @@ -56,11 +56,9 @@ type EnterpriseTestSuite struct { 
Fixtures *EnterpriseTestFixtures Runner *Runner - testCreds params.ForgeCredentials - secondaryTestCreds params.ForgeCredentials - forgeEndpoint params.ForgeEndpoint - ghesEndpoint params.ForgeEndpoint - ghesCreds params.ForgeCredentials + testCreds params.GithubCredentials + secondaryTestCreds params.GithubCredentials + githubEndpoint params.GithubEndpoint } func (s *EnterpriseTestSuite) SetupTest() { @@ -72,11 +70,9 @@ func (s *EnterpriseTestSuite) SetupTest() { } adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) - s.forgeEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.ghesEndpoint = garmTesting.CreateGHESEndpoint(adminCtx, db, s.T()) - s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.forgeEndpoint) - s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.forgeEndpoint) - s.ghesCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "ghes-creds", db, s.T(), s.ghesEndpoint) + s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) + s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some organization objects in the database, for testing purposes enterprises := map[string]params.Enterprise{} @@ -85,7 +81,7 @@ func (s *EnterpriseTestSuite) SetupTest() { enterprise, err := db.CreateEnterprise( adminCtx, name, - s.testCreds, + s.testCreds.Name, fmt.Sprintf("test-webhook-secret-%v", i), params.PoolBalancerTypeRoundRobin, ) @@ -107,7 +103,7 @@ func (s *EnterpriseTestSuite) SetupTest() { Providers: map[string]common.Provider{ "test-provider": providerMock, }, - Credentials: map[string]params.ForgeCredentials{ + Credentials: map[string]params.GithubCredentials{ s.testCreds.Name: s.testCreds, s.secondaryTestCreds.Name: s.secondaryTestCreds, }, @@ -210,7 +206,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error creating enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("creating enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterpriseStartPoolMgrFailed() { @@ -222,80 +218,20 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseStartPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error starting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("starting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestListEnterprises() { s.Fixtures.PoolMgrCtrlMock.On("GetEnterprisePoolManager", mock.AnythingOfType("params.Enterprise")).Return(s.Fixtures.PoolMgrMock, nil) s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - orgs, err := s.Runner.ListEnterprises(s.Fixtures.AdminContext, params.EnterpriseFilter{}) + orgs, err := s.Runner.ListEnterprises(s.Fixtures.AdminContext) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreEnterprises), orgs) } -func (s 
*EnterpriseTestSuite) TestListEnterprisesWithFilters() { - s.Fixtures.PoolMgrCtrlMock.On("GetEnterprisePoolManager", mock.AnythingOfType("params.Enterprise")).Return(s.Fixtures.PoolMgrMock, nil) - s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - - enterprise, err := s.Fixtures.Store.CreateEnterprise( - s.Fixtures.AdminContext, - "test-enterprise", - s.testCreds, - "super secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - enterprise2, err := s.Fixtures.Store.CreateEnterprise( - s.Fixtures.AdminContext, - "test-enterprise2", - s.testCreds, - "super secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - enterprise3, err := s.Fixtures.Store.CreateEnterprise( - s.Fixtures.AdminContext, - "test-enterprise", - s.ghesCreds, - "super secret", - params.PoolBalancerTypeRoundRobin, - ) - s.Require().NoError(err) - orgs, err := s.Runner.ListEnterprises( - s.Fixtures.AdminContext, - params.EnterpriseFilter{ - Name: "test-enterprise", - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise, enterprise3}, orgs) - - orgs, err = s.Runner.ListEnterprises( - s.Fixtures.AdminContext, - params.EnterpriseFilter{ - Name: "test-enterprise", - Endpoint: s.ghesEndpoint.Name, - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise3}, orgs) - - orgs, err = s.Runner.ListEnterprises( - s.Fixtures.AdminContext, - params.EnterpriseFilter{ - Name: "test-enterprise2", - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise2}, orgs) -} - func (s *EnterpriseTestSuite) TestListEnterprisesErrUnauthorized() { - _, err := s.Runner.ListEnterprises(context.Background(), params.EnterpriseFilter{}) + _, err := s.Runner.ListEnterprises(context.Background()) s.Require().Equal(runnerErrors.ErrUnauthorized, err) } @@ -324,7 +260,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprise() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEnterpriseByID(s.Fixtures.AdminContext, s.Fixtures.StoreEnterprises["test-enterprise-3"].ID) - s.Require().Equal("error fetching enterprise: not found", err.Error()) + s.Require().Equal("fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseErrUnauthorized() { @@ -334,9 +270,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterpriseErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDefinedFailed() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -354,7 +290,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolMgrFailed() { err := s.Runner.DeleteEnterprise(s.Fixtures.AdminContext, s.Fixtures.StoreEnterprises["test-enterprise-1"].ID) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error deleting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("deleting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterprise() { @@ -441,9 +377,9 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolFetchPoolParamsFailed() { } func (s 
*EnterpriseTestSuite) TestGetEnterprisePoolByID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } enterprisePool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -463,9 +399,9 @@ func (s *EnterpriseTestSuite) TestGetEnterprisePoolByIDErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -477,7 +413,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID) - s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: finding pool: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolErrUnauthorized() { @@ -487,9 +423,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolRunnersFailed() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -506,9 +442,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolRunnersFailed() { } func (s *EnterpriseTestSuite) TestListEnterprisePools() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } enterprisePools := []params.Pool{} for i := 1; i <= 2; i++ { @@ -533,9 +469,9 @@ func (s *EnterpriseTestSuite) TestListOrgPoolsErrUnauthorized() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } enterprisePool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -556,9 +492,9 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolErrUnauthorized() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolMinIdleGreaterThanMax() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -575,9 +511,9 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolMinIdleGreaterThanMax() { } func (s *EnterpriseTestSuite) TestListEnterpriseInstances() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: 
s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.ForgeEntityTypeEnterprise, + EntityType: params.GithubEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { diff --git a/runner/gitea_credentials.go b/runner/gitea_credentials.go deleted file mode 100644 index d66212f9..00000000 --- a/runner/gitea_credentials.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package runner - -import ( - "context" - "fmt" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/params" -) - -func (r *Runner) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { - if !auth.IsAdmin(ctx) { - return nil, runnerErrors.ErrUnauthorized - } - - // Get the credentials from the store. The cache is always updated after the database successfully - // commits the transaction that created/updated the credentials. - // If we create a set of credentials then immediately after we call ListGiteaCredentials, - // there is a posibillity that not all creds will be in the cache. - creds, err := r.store.ListGiteaCredentials(ctx) - if err != nil { - return nil, fmt.Errorf("error fetching gitea credentials: %w", err) - } - return creds, nil -} - -func (r *Runner) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized - } - - if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error failed to validate gitea credentials params: %w", err) - } - - creds, err := r.store.CreateGiteaCredentials(ctx, param) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error failed to create gitea credentials: %w", err) - } - - return creds, nil -} - -func (r *Runner) GetGiteaCredentials(ctx context.Context, id uint) (params.ForgeCredentials, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized - } - - creds, err := r.store.GetGiteaCredentials(ctx, id, true) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error failed to get gitea credentials: %w", err) - } - - return creds, nil -} - -func (r *Runner) DeleteGiteaCredentials(ctx context.Context, id uint) error { - if !auth.IsAdmin(ctx) { - return runnerErrors.ErrUnauthorized - } - - if err := r.store.DeleteGiteaCredentials(ctx, id); err != nil { - return fmt.Errorf("error failed to delete gitea credentials: %w", err) - } - - return nil -} - -func (r *Runner) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized - } - - if err := param.Validate(); err != nil { - return 
params.ForgeCredentials{}, fmt.Errorf("error failed to validate gitea credentials params: %w", err) - } - - newCreds, err := r.store.UpdateGiteaCredentials(ctx, id, param) - if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("error failed to update gitea credentials: %w", err) - } - - return newCreds, nil -} diff --git a/runner/gitea_endpoints.go b/runner/gitea_endpoints.go deleted file mode 100644 index 4a7e32d9..00000000 --- a/runner/gitea_endpoints.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package runner - -import ( - "context" - "fmt" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/params" -) - -func (r *Runner) CreateGiteaEndpoint(ctx context.Context, param params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized - } - - if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to validate gitea endpoint params: %w", err) - } - - ep, err := r.store.CreateGiteaEndpoint(ctx, param) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to create gitea endpoint: %w", err) - } - - return ep, nil -} - -func (r *Runner) GetGiteaEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized - } - endpoint, err := r.store.GetGiteaEndpoint(ctx, name) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to get gitea endpoint: %w", err) - } - - return endpoint, nil -} - -func (r *Runner) DeleteGiteaEndpoint(ctx context.Context, name string) error { - if !auth.IsAdmin(ctx) { - return runnerErrors.ErrUnauthorized - } - - err := r.store.DeleteGiteaEndpoint(ctx, name) - if err != nil { - return fmt.Errorf("failed to delete gitea endpoint: %w", err) - } - - return nil -} - -func (r *Runner) UpdateGiteaEndpoint(ctx context.Context, name string, param params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error) { - if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized - } - - if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to validate gitea endpoint params: %w", err) - } - - newEp, err := r.store.UpdateGiteaEndpoint(ctx, name, param) - if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to update gitea endpoint: %w", err) - } - return newEp, nil -} - -func (r *Runner) ListGiteaEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { - if !auth.IsAdmin(ctx) { - return nil, runnerErrors.ErrUnauthorized - } - - endpoints, err := r.store.ListGiteaEndpoints(ctx) - if err != nil { - return nil, fmt.Errorf("failed to list gitea endpoints: %w", err) - } - - return endpoints, nil -} diff --git a/runner/github_credentials.go b/runner/github_credentials.go index 
5e1291ff..fbf9d330 100644 --- a/runner/github_credentials.go +++ b/runner/github_credentials.go @@ -1,85 +1,53 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package runner import ( "context" - "fmt" + + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/params" ) -func (r *Runner) ListCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { +func (r *Runner) ListCredentials(ctx context.Context) ([]params.GithubCredentials, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - // Get the credentials from the store. The cache is always updated after the database successfully - // commits the transaction that created/updated the credentials. - // If we create a set of credentials then immediately after we call ListCredentials, - // there is a posibillity that not all creds will be in the cache. creds, err := r.store.ListGithubCredentials(ctx) if err != nil { - return nil, fmt.Errorf("error fetching github credentials: %w", err) + return nil, errors.Wrap(err, "fetching github credentials") } - // If we do have cache, update the rate limit for each credential. The rate limits are queried - // every 30 seconds and set in cache. 
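The `runner/github_credentials.go` hunk (continuing below) removes the cache overlay that stamped each credential with its most recently polled rate limit, and, like the rest of this patch, swaps `fmt.Errorf("error ...: %w", err)` wrapping for `github.com/pkg/errors`. The two wrapping styles are functionally close; a runnable comparison, standalone and assuming pkg/errors v0.9.0 or newer:

```go
package main

import (
	"database/sql"
	stderrors "errors"
	"fmt"

	pkgerrors "github.com/pkg/errors"
)

func main() {
	cause := sql.ErrNoRows

	// pkg/errors implements Unwrap since v0.9.0, so the standard library can
	// still inspect the chain...
	wrapped := pkgerrors.Wrap(cause, "fetching github credentials")
	fmt.Println(stderrors.Is(wrapped, sql.ErrNoRows)) // true

	// ...just like the fmt.Errorf("%w") form this patch replaces.
	wrappedStd := fmt.Errorf("error fetching github credentials: %w", cause)
	fmt.Println(stderrors.Is(wrappedStd, sql.ErrNoRows)) // true

	// One behavioral difference worth remembering: Wrap(nil, ...) returns nil,
	// whereas fmt.Errorf always returns a non-nil error.
	fmt.Println(pkgerrors.Wrap(nil, "no-op") == nil) // true
}
```

The dropped `error ` prefix in the wrap messages is also why the test assertions earlier in this diff change from `error fetching enterprise: not found` to `fetching enterprise: not found`.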
- credsCache := cache.GetAllGithubCredentialsAsMap() - for idx, cred := range creds { - inCache, ok := credsCache[cred.ID] - if ok { - creds[idx].RateLimit = inCache.RateLimit - } - } return creds, nil } -func (r *Runner) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) { +func (r *Runner) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) { if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + return params.GithubCredentials{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, fmt.Errorf("failed to validate github credentials params: %w", err) + return params.GithubCredentials{}, errors.Wrap(err, "failed to validate github credentials params") } creds, err := r.store.CreateGithubCredentials(ctx, param) if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("failed to create github credentials: %w", err) + return params.GithubCredentials{}, errors.Wrap(err, "failed to create github credentials") } return creds, nil } -func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.ForgeCredentials, error) { +func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.GithubCredentials, error) { if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + return params.GithubCredentials{}, runnerErrors.ErrUnauthorized } creds, err := r.store.GetGithubCredentials(ctx, id, true) if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("failed to get github credentials: %w", err) - } - - cached, ok := cache.GetGithubCredentials((creds.ID)) - if ok { - creds.RateLimit = cached.RateLimit + return params.GithubCredentials{}, errors.Wrap(err, "failed to get github credentials") } return creds, nil @@ -91,24 +59,24 @@ func (r *Runner) DeleteGithubCredentials(ctx context.Context, id uint) error { } if err := r.store.DeleteGithubCredentials(ctx, id); err != nil { - return fmt.Errorf("failed to delete github credentials: %w", err) + return errors.Wrap(err, "failed to delete github credentials") } return nil } -func (r *Runner) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) { +func (r *Runner) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) { if !auth.IsAdmin(ctx) { - return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + return params.GithubCredentials{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, fmt.Errorf("failed to validate github credentials params: %w", err) + return params.GithubCredentials{}, errors.Wrap(err, "failed to validate github credentials params") } newCreds, err := r.store.UpdateGithubCredentials(ctx, id, param) if err != nil { - return params.ForgeCredentials{}, fmt.Errorf("failed to update github credentials: %w", err) + return params.GithubCredentials{}, errors.Wrap(err, "failed to update github credentials") } return newCreds, nil diff --git a/runner/github_endpoints.go b/runner/github_endpoints.go index 29965081..1f6431ea 100644 --- a/runner/github_endpoints.go +++ b/runner/github_endpoints.go @@ -1,52 +1,39 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may 
-// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package runner import ( "context" - "fmt" + + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" ) -func (r *Runner) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) { +func (r *Runner) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("error failed to validate github endpoint params: %w", err) + return params.GithubEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") } ep, err := r.store.CreateGithubEndpoint(ctx, param) if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to create github endpoint: %w", err) + return params.GithubEndpoint{}, errors.Wrap(err, "failed to create github endpoint") } return ep, nil } -func (r *Runner) GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { +func (r *Runner) GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized } endpoint, err := r.store.GetGithubEndpoint(ctx, name) if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to get github endpoint: %w", err) + return params.GithubEndpoint{}, errors.Wrap(err, "failed to get github endpoint") } return endpoint, nil @@ -59,36 +46,36 @@ func (r *Runner) DeleteGithubEndpoint(ctx context.Context, name string) error { err := r.store.DeleteGithubEndpoint(ctx, name) if err != nil { - return fmt.Errorf("failed to delete github endpoint: %w", err) + return errors.Wrap(err, "failed to delete github endpoint") } return nil } -func (r *Runner) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) { +func (r *Runner) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to validate github endpoint params: %w", err) + return params.GithubEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") } newEp, err := r.store.UpdateGithubEndpoint(ctx, name, param) if err != nil { - return params.ForgeEndpoint{}, fmt.Errorf("failed to update github endpoint: %w", err) + return params.GithubEndpoint{}, errors.Wrap(err, "failed to update github endpoint") } return newEp, nil } -func (r *Runner) ListGithubEndpoints(ctx context.Context) 
([]params.ForgeEndpoint, error) { +func (r *Runner) ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } endpoints, err := r.store.ListGithubEndpoints(ctx) if err != nil { - return nil, fmt.Errorf("failed to list github endpoints: %w", err) + return nil, errors.Wrap(err, "failed to list github endpoints") } return endpoints, nil diff --git a/runner/interfaces.go b/runner/interfaces.go index 3d4703f7..ff8129ed 100644 --- a/runner/interfaces.go +++ b/runner/interfaces.go @@ -43,7 +43,7 @@ type EnterprisePoolManager interface { GetEnterprisePoolManagers() (map[string]common.PoolManager, error) } -//go:generate go run github.com/vektra/mockery/v2@latest +//go:generate mockery --name=PoolManagerController type PoolManagerController interface { RepoPoolManager diff --git a/runner/metadata.go b/runner/metadata.go index b309b96e..6b19c0d5 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -1,27 +1,15 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package runner import ( "bytes" "context" "encoding/base64" - "errors" "fmt" "html/template" "log/slog" + "strings" + + "github.com/pkg/errors" "github.com/cloudbase/garm-provider-common/defaults" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -29,7 +17,7 @@ import ( "github.com/cloudbase/garm/params" ) -var githubSystemdUnitTemplate = `[Unit] +var systemdUnitTemplate = `[Unit] Description=GitHub Actions Runner ({{.ServiceName}}) After=network.target @@ -45,24 +33,11 @@ TimeoutStopSec=5min WantedBy=multi-user.target ` -var giteaSystemdUnitTemplate = `[Unit] -Description=Act Runner ({{.ServiceName}}) -After=network.target - -[Service] -ExecStart=/home/{{.RunAsUser}}/act-runner/act_runner daemon --once -User={{.RunAsUser}} -WorkingDirectory=/home/{{.RunAsUser}}/act-runner -KillMode=process -KillSignal=SIGTERM -TimeoutStopSec=5min -Restart=always - -[Install] -WantedBy=multi-user.target -` - func validateInstanceState(ctx context.Context) (params.Instance, error) { + if !auth.InstanceHasJITConfig(ctx) { + return params.Instance{}, fmt.Errorf("instance not configured for JIT: %w", runnerErrors.ErrNotFound) + } + status := auth.InstanceRunnerStatus(ctx) if status != params.RunnerPending && status != params.RunnerInstalling { return params.Instance{}, runnerErrors.ErrUnauthorized @@ -75,56 +50,6 @@ func validateInstanceState(ctx context.Context) (params.Instance, error) { return instance, nil } -func (r *Runner) getForgeEntityFromInstance(ctx context.Context, instance params.Instance) (params.ForgeEntity, error) { - var entityGetter params.EntityGetter - var err error - switch { - case instance.PoolID != "": - entityGetter, err = r.store.GetPoolByID(r.ctx, instance.PoolID) - case instance.ScaleSetID != 0: - entityGetter, err = r.store.GetScaleSetByID(r.ctx, instance.ScaleSetID) - default: - return params.ForgeEntity{}, errors.New("instance not associated with a pool or scale set") - } - - 
if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get entity getter", - "instance", instance.Name) - return params.ForgeEntity{}, fmt.Errorf("error fetching entity getter: %w", err) - } - - poolEntity, err := entityGetter.GetEntity() - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get entity", - "instance", instance.Name) - return params.ForgeEntity{}, fmt.Errorf("error fetching entity: %w", err) - } - - entity, err := r.store.GetForgeEntity(r.ctx, poolEntity.EntityType, poolEntity.ID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get entity", - "instance", instance.Name) - return params.ForgeEntity{}, fmt.Errorf("error fetching entity: %w", err) - } - return entity, nil -} - -func (r *Runner) getServiceNameForEntity(entity params.ForgeEntity) (string, error) { - switch entity.EntityType { - case params.ForgeEntityTypeEnterprise: - return fmt.Sprintf("actions.runner.%s.%s", entity.Owner, entity.Name), nil - case params.ForgeEntityTypeOrganization: - return fmt.Sprintf("actions.runner.%s.%s", entity.Owner, entity.Name), nil - case params.ForgeEntityTypeRepository: - return fmt.Sprintf("actions.runner.%s-%s.%s", entity.Owner, entity.Name, entity.Name), nil - default: - return "", errors.New("unknown entity type") - } -} - func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { instance, err := validateInstanceState(ctx) if err != nil { @@ -132,52 +57,37 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { ctx, "failed to get instance params") return "", runnerErrors.ErrUnauthorized } - entity, err := r.getForgeEntityFromInstance(ctx, instance) + + pool, err := r.store.GetPoolByID(r.ctx, instance.PoolID) if err != nil { - slog.ErrorContext(r.ctx, "failed to get entity", "error", err) - return "", fmt.Errorf("error fetching entity: %w", err) + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get pool", + "pool_id", instance.PoolID) + return "", errors.Wrap(err, "fetching pool") } - serviceName, err := r.getServiceNameForEntity(entity) - if err != nil { - slog.ErrorContext(r.ctx, "failed to get service name", "error", err) - return "", fmt.Errorf("error fetching service name: %w", err) + tpl := "actions.runner.%s.%s" + var serviceName string + switch pool.PoolType() { + case params.GithubEntityTypeEnterprise: + serviceName = fmt.Sprintf(tpl, pool.EnterpriseName, instance.Name) + case params.GithubEntityTypeOrganization: + serviceName = fmt.Sprintf(tpl, pool.OrgName, instance.Name) + case params.GithubEntityTypeRepository: + serviceName = fmt.Sprintf(tpl, strings.ReplaceAll(pool.RepoName, "/", "-"), instance.Name) } return serviceName, nil } func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) ([]byte, error) { - instance, err := validateInstanceState(ctx) + serviceName, err := r.GetRunnerServiceName(ctx) if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get instance params") - return nil, runnerErrors.ErrUnauthorized - } - entity, err := r.getForgeEntityFromInstance(ctx, instance) - if err != nil { - slog.ErrorContext(r.ctx, "failed to get entity", "error", err) - return nil, fmt.Errorf("error fetching entity: %w", err) + return nil, errors.Wrap(err, "fetching runner service name") } - serviceName, err := r.getServiceNameForEntity(entity) + unitTemplate, err := template.New("").Parse(systemdUnitTemplate) if err != nil { - slog.ErrorContext(r.ctx, "failed to get 
service name", "error", err) - return nil, fmt.Errorf("error fetching service name: %w", err) - } - - var unitTemplate *template.Template - switch entity.Credentials.ForgeType { - case params.GithubEndpointType: - unitTemplate, err = template.New("").Parse(githubSystemdUnitTemplate) - case params.GiteaEndpointType: - unitTemplate, err = template.New("").Parse(giteaSystemdUnitTemplate) - default: - slog.ErrorContext(r.ctx, "unknown forge type", "forge_type", entity.Credentials.ForgeType) - return nil, errors.New("unknown forge type") - } - if err != nil { - slog.ErrorContext(r.ctx, "failed to parse template", "error", err) - return nil, fmt.Errorf("error parsing template: %w", err) + return nil, errors.Wrap(err, "parsing template") } if runAsUser == "" { @@ -194,17 +104,12 @@ func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) var unitFile bytes.Buffer if err := unitTemplate.Execute(&unitFile, data); err != nil { - slog.ErrorContext(r.ctx, "failed to execute template", "error", err) - return nil, fmt.Errorf("error executing template: %w", err) + return nil, errors.Wrap(err, "executing template") } return unitFile.Bytes(), nil } func (r *Runner) GetJITConfigFile(ctx context.Context, file string) ([]byte, error) { - if !auth.InstanceHasJITConfig(ctx) { - return nil, runnerErrors.NewNotFoundError("instance not configured for JIT") - } - instance, err := validateInstanceState(ctx) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( @@ -214,12 +119,12 @@ func (r *Runner) GetJITConfigFile(ctx context.Context, file string) ([]byte, err jitConfig := instance.JitConfiguration contents, ok := jitConfig[file] if !ok { - return nil, runnerErrors.NewNotFoundError("could not find file %q", file) + return nil, errors.Wrap(runnerErrors.ErrNotFound, "retrieving file") } decoded, err := base64.StdEncoding.DecodeString(contents) if err != nil { - return nil, fmt.Errorf("error decoding file contents: %w", err) + return nil, errors.Wrap(err, "decoding file contents") } return decoded, nil @@ -248,12 +153,12 @@ func (r *Runner) GetInstanceGithubRegistrationToken(ctx context.Context) (string poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) if err != nil { - return "", fmt.Errorf("error fetching pool manager for instance: %w", err) + return "", errors.Wrap(err, "fetching pool manager for instance") } token, err := poolMgr.GithubRunnerRegistrationToken() if err != nil { - return "", fmt.Errorf("error fetching runner token: %w", err) + return "", errors.Wrap(err, "fetching runner token") } tokenFetched := true @@ -262,11 +167,11 @@ func (r *Runner) GetInstanceGithubRegistrationToken(ctx context.Context) (string } if _, err := r.store.UpdateInstance(r.ctx, instance.Name, updateParams); err != nil { - return "", fmt.Errorf("error setting token_fetched for instance: %w", err) + return "", errors.Wrap(err, "setting token_fetched for instance") } if err := r.store.AddInstanceEvent(ctx, instance.Name, params.FetchTokenEvent, params.EventInfo, "runner registration token was retrieved"); err != nil { - return "", fmt.Errorf("error recording event: %w", err) + return "", errors.Wrap(err, "recording event") } return token, nil @@ -282,7 +187,7 @@ func (r *Runner) GetRootCertificateBundle(ctx context.Context) (params.Certifica poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) if err != nil { - return params.CertificateBundle{}, fmt.Errorf("error fetching pool manager for instance: %w", err) + return params.CertificateBundle{}, errors.Wrap(err, "fetching pool manager 
for instance") } bundle, err := poolMgr.RootCABundle() diff --git a/runner/metrics/enterprise.go b/runner/metrics/enterprise.go index be6eba66..407c0fc4 100644 --- a/runner/metrics/enterprise.go +++ b/runner/metrics/enterprise.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( @@ -19,7 +5,6 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck ) @@ -29,7 +14,7 @@ func CollectEnterpriseMetric(ctx context.Context, r *runner.Runner) error { metrics.EnterpriseInfo.Reset() metrics.EnterprisePoolManagerStatus.Reset() - enterprises, err := r.ListEnterprises(ctx, params.EnterpriseFilter{}) + enterprises, err := r.ListEnterprises(ctx) if err != nil { return err } diff --git a/runner/metrics/health.go b/runner/metrics/health.go index fcd254df..05e1ed9b 100644 --- a/runner/metrics/health.go +++ b/runner/metrics/health.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/runner/metrics/instance.go b/runner/metrics/instance.go index bc6bed0a..06fd4881 100644 --- a/runner/metrics/instance.go +++ b/runner/metrics/instance.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/runner/metrics/metrics.go b/runner/metrics/metrics.go index 772ba86a..f9f70864 100644 --- a/runner/metrics/metrics.go +++ b/runner/metrics/metrics.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/runner/metrics/organization.go b/runner/metrics/organization.go index 6bf6d9e5..6b9f6b71 100644 --- a/runner/metrics/organization.go +++ b/runner/metrics/organization.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( @@ -19,7 +5,6 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" ) @@ -29,7 +14,7 @@ func CollectOrganizationMetric(ctx context.Context, r *runner.Runner) error { metrics.OrganizationInfo.Reset() metrics.OrganizationPoolManagerStatus.Reset() - organizations, err := r.ListOrganizations(ctx, params.OrganizationFilter{}) + organizations, err := r.ListOrganizations(ctx) if err != nil { return err } diff --git a/runner/metrics/pool.go b/runner/metrics/pool.go index 6b06a8b9..44ad27a8 100644 --- a/runner/metrics/pool.go +++ b/runner/metrics/pool.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( diff --git a/runner/metrics/provider.go b/runner/metrics/provider.go index 1d7a065d..e2b38a9f 100644 --- a/runner/metrics/provider.go +++ b/runner/metrics/provider.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package metrics import ( diff --git a/runner/metrics/repository.go b/runner/metrics/repository.go index a2e8fa57..b76fcc0e 100644 --- a/runner/metrics/repository.go +++ b/runner/metrics/repository.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package metrics import ( @@ -19,7 +5,6 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" ) @@ -28,7 +13,7 @@ func CollectRepositoryMetric(ctx context.Context, r *runner.Runner) error { metrics.EnterpriseInfo.Reset() metrics.EnterprisePoolManagerStatus.Reset() - repositories, err := r.ListRepositories(ctx, params.RepositoryFilter{}) + repositories, err := r.ListRepositories(ctx) if err != nil { return err } diff --git a/runner/mocks/PoolManagerController.go b/runner/mocks/PoolManagerController.go index b17196ec..05720ebe 100644 --- a/runner/mocks/PoolManagerController.go +++ b/runner/mocks/PoolManagerController.go @@ -1,4 +1,4 @@ -// Code generated by mockery. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -19,14 +19,6 @@ type PoolManagerController struct { mock.Mock } -type PoolManagerController_Expecter struct { - mock *mock.Mock -} - -func (_m *PoolManagerController) EXPECT() *PoolManagerController_Expecter { - return &PoolManagerController_Expecter{mock: &_m.Mock} -} - // CreateEnterprisePoolManager provides a mock function with given fields: ctx, enterprise, providers, store func (_m *PoolManagerController) CreateEnterprisePoolManager(ctx context.Context, enterprise params.Enterprise, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, enterprise, providers, store) @@ -57,37 +49,6 @@ func (_m *PoolManagerController) CreateEnterprisePoolManager(ctx context.Context return r0, r1 } -// PoolManagerController_CreateEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEnterprisePoolManager' -type PoolManagerController_CreateEnterprisePoolManager_Call struct { - *mock.Call -} - -// CreateEnterprisePoolManager is a helper method to define mock.On call -// - ctx context.Context -// - enterprise params.Enterprise -// - providers map[string]common.Provider -// - store databasecommon.Store -func (_e *PoolManagerController_Expecter) CreateEnterprisePoolManager(ctx interface{}, enterprise interface{}, providers interface{}, store interface{}) *PoolManagerController_CreateEnterprisePoolManager_Call { - return &PoolManagerController_CreateEnterprisePoolManager_Call{Call: _e.mock.On("CreateEnterprisePoolManager", ctx, enterprise, providers, store)} -} - -func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) Run(run func(ctx context.Context, enterprise params.Enterprise, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateEnterprisePoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { 
- run(args[0].(context.Context), args[1].(params.Enterprise), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) - }) - return _c -} - -func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_CreateEnterprisePoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) RunAndReturn(run func(context.Context, params.Enterprise, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateEnterprisePoolManager_Call { - _c.Call.Return(run) - return _c -} - // CreateOrgPoolManager provides a mock function with given fields: ctx, org, providers, store func (_m *PoolManagerController) CreateOrgPoolManager(ctx context.Context, org params.Organization, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, org, providers, store) @@ -118,37 +79,6 @@ func (_m *PoolManagerController) CreateOrgPoolManager(ctx context.Context, org p return r0, r1 } -// PoolManagerController_CreateOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrgPoolManager' -type PoolManagerController_CreateOrgPoolManager_Call struct { - *mock.Call -} - -// CreateOrgPoolManager is a helper method to define mock.On call -// - ctx context.Context -// - org params.Organization -// - providers map[string]common.Provider -// - store databasecommon.Store -func (_e *PoolManagerController_Expecter) CreateOrgPoolManager(ctx interface{}, org interface{}, providers interface{}, store interface{}) *PoolManagerController_CreateOrgPoolManager_Call { - return &PoolManagerController_CreateOrgPoolManager_Call{Call: _e.mock.On("CreateOrgPoolManager", ctx, org, providers, store)} -} - -func (_c *PoolManagerController_CreateOrgPoolManager_Call) Run(run func(ctx context.Context, org params.Organization, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateOrgPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.Organization), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) - }) - return _c -} - -func (_c *PoolManagerController_CreateOrgPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_CreateOrgPoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_CreateOrgPoolManager_Call) RunAndReturn(run func(context.Context, params.Organization, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateOrgPoolManager_Call { - _c.Call.Return(run) - return _c -} - // CreateRepoPoolManager provides a mock function with given fields: ctx, repo, providers, store func (_m *PoolManagerController) CreateRepoPoolManager(ctx context.Context, repo params.Repository, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, repo, providers, store) @@ -179,37 +109,6 @@ func (_m *PoolManagerController) CreateRepoPoolManager(ctx context.Context, repo return r0, r1 } -// PoolManagerController_CreateRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateRepoPoolManager' -type PoolManagerController_CreateRepoPoolManager_Call struct { - *mock.Call -} - -// CreateRepoPoolManager is a helper method to 
define mock.On call -// - ctx context.Context -// - repo params.Repository -// - providers map[string]common.Provider -// - store databasecommon.Store -func (_e *PoolManagerController_Expecter) CreateRepoPoolManager(ctx interface{}, repo interface{}, providers interface{}, store interface{}) *PoolManagerController_CreateRepoPoolManager_Call { - return &PoolManagerController_CreateRepoPoolManager_Call{Call: _e.mock.On("CreateRepoPoolManager", ctx, repo, providers, store)} -} - -func (_c *PoolManagerController_CreateRepoPoolManager_Call) Run(run func(ctx context.Context, repo params.Repository, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateRepoPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(params.Repository), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) - }) - return _c -} - -func (_c *PoolManagerController_CreateRepoPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_CreateRepoPoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_CreateRepoPoolManager_Call) RunAndReturn(run func(context.Context, params.Repository, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateRepoPoolManager_Call { - _c.Call.Return(run) - return _c -} - // DeleteEnterprisePoolManager provides a mock function with given fields: enterprise func (_m *PoolManagerController) DeleteEnterprisePoolManager(enterprise params.Enterprise) error { ret := _m.Called(enterprise) @@ -228,34 +127,6 @@ func (_m *PoolManagerController) DeleteEnterprisePoolManager(enterprise params.E return r0 } -// PoolManagerController_DeleteEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEnterprisePoolManager' -type PoolManagerController_DeleteEnterprisePoolManager_Call struct { - *mock.Call -} - -// DeleteEnterprisePoolManager is a helper method to define mock.On call -// - enterprise params.Enterprise -func (_e *PoolManagerController_Expecter) DeleteEnterprisePoolManager(enterprise interface{}) *PoolManagerController_DeleteEnterprisePoolManager_Call { - return &PoolManagerController_DeleteEnterprisePoolManager_Call{Call: _e.mock.On("DeleteEnterprisePoolManager", enterprise)} -} - -func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) Run(run func(enterprise params.Enterprise)) *PoolManagerController_DeleteEnterprisePoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Enterprise)) - }) - return _c -} - -func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteEnterprisePoolManager_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) RunAndReturn(run func(params.Enterprise) error) *PoolManagerController_DeleteEnterprisePoolManager_Call { - _c.Call.Return(run) - return _c -} - // DeleteOrgPoolManager provides a mock function with given fields: org func (_m *PoolManagerController) DeleteOrgPoolManager(org params.Organization) error { ret := _m.Called(org) @@ -274,34 +145,6 @@ func (_m *PoolManagerController) DeleteOrgPoolManager(org params.Organization) e return r0 } -// PoolManagerController_DeleteOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteOrgPoolManager' -type 
PoolManagerController_DeleteOrgPoolManager_Call struct { - *mock.Call -} - -// DeleteOrgPoolManager is a helper method to define mock.On call -// - org params.Organization -func (_e *PoolManagerController_Expecter) DeleteOrgPoolManager(org interface{}) *PoolManagerController_DeleteOrgPoolManager_Call { - return &PoolManagerController_DeleteOrgPoolManager_Call{Call: _e.mock.On("DeleteOrgPoolManager", org)} -} - -func (_c *PoolManagerController_DeleteOrgPoolManager_Call) Run(run func(org params.Organization)) *PoolManagerController_DeleteOrgPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Organization)) - }) - return _c -} - -func (_c *PoolManagerController_DeleteOrgPoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteOrgPoolManager_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManagerController_DeleteOrgPoolManager_Call) RunAndReturn(run func(params.Organization) error) *PoolManagerController_DeleteOrgPoolManager_Call { - _c.Call.Return(run) - return _c -} - // DeleteRepoPoolManager provides a mock function with given fields: repo func (_m *PoolManagerController) DeleteRepoPoolManager(repo params.Repository) error { ret := _m.Called(repo) @@ -320,34 +163,6 @@ func (_m *PoolManagerController) DeleteRepoPoolManager(repo params.Repository) e return r0 } -// PoolManagerController_DeleteRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteRepoPoolManager' -type PoolManagerController_DeleteRepoPoolManager_Call struct { - *mock.Call -} - -// DeleteRepoPoolManager is a helper method to define mock.On call -// - repo params.Repository -func (_e *PoolManagerController_Expecter) DeleteRepoPoolManager(repo interface{}) *PoolManagerController_DeleteRepoPoolManager_Call { - return &PoolManagerController_DeleteRepoPoolManager_Call{Call: _e.mock.On("DeleteRepoPoolManager", repo)} -} - -func (_c *PoolManagerController_DeleteRepoPoolManager_Call) Run(run func(repo params.Repository)) *PoolManagerController_DeleteRepoPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Repository)) - }) - return _c -} - -func (_c *PoolManagerController_DeleteRepoPoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteRepoPoolManager_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *PoolManagerController_DeleteRepoPoolManager_Call) RunAndReturn(run func(params.Repository) error) *PoolManagerController_DeleteRepoPoolManager_Call { - _c.Call.Return(run) - return _c -} - // GetEnterprisePoolManager provides a mock function with given fields: enterprise func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Enterprise) (common.PoolManager, error) { ret := _m.Called(enterprise) @@ -378,34 +193,6 @@ func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Ente return r0, r1 } -// PoolManagerController_GetEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprisePoolManager' -type PoolManagerController_GetEnterprisePoolManager_Call struct { - *mock.Call -} - -// GetEnterprisePoolManager is a helper method to define mock.On call -// - enterprise params.Enterprise -func (_e *PoolManagerController_Expecter) GetEnterprisePoolManager(enterprise interface{}) *PoolManagerController_GetEnterprisePoolManager_Call { - return &PoolManagerController_GetEnterprisePoolManager_Call{Call: _e.mock.On("GetEnterprisePoolManager", enterprise)} -} - -func (_c 
*PoolManagerController_GetEnterprisePoolManager_Call) Run(run func(enterprise params.Enterprise)) *PoolManagerController_GetEnterprisePoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Enterprise)) - }) - return _c -} - -func (_c *PoolManagerController_GetEnterprisePoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetEnterprisePoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_GetEnterprisePoolManager_Call) RunAndReturn(run func(params.Enterprise) (common.PoolManager, error)) *PoolManagerController_GetEnterprisePoolManager_Call { - _c.Call.Return(run) - return _c -} - // GetEnterprisePoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -436,33 +223,6 @@ func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common. return r0, r1 } -// PoolManagerController_GetEnterprisePoolManagers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprisePoolManagers' -type PoolManagerController_GetEnterprisePoolManagers_Call struct { - *mock.Call -} - -// GetEnterprisePoolManagers is a helper method to define mock.On call -func (_e *PoolManagerController_Expecter) GetEnterprisePoolManagers() *PoolManagerController_GetEnterprisePoolManagers_Call { - return &PoolManagerController_GetEnterprisePoolManagers_Call{Call: _e.mock.On("GetEnterprisePoolManagers")} -} - -func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) Run(run func()) *PoolManagerController_GetEnterprisePoolManagers_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetEnterprisePoolManagers_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetEnterprisePoolManagers_Call { - _c.Call.Return(run) - return _c -} - // GetOrgPoolManager provides a mock function with given fields: org func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (common.PoolManager, error) { ret := _m.Called(org) @@ -493,34 +253,6 @@ func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (com return r0, r1 } -// PoolManagerController_GetOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrgPoolManager' -type PoolManagerController_GetOrgPoolManager_Call struct { - *mock.Call -} - -// GetOrgPoolManager is a helper method to define mock.On call -// - org params.Organization -func (_e *PoolManagerController_Expecter) GetOrgPoolManager(org interface{}) *PoolManagerController_GetOrgPoolManager_Call { - return &PoolManagerController_GetOrgPoolManager_Call{Call: _e.mock.On("GetOrgPoolManager", org)} -} - -func (_c *PoolManagerController_GetOrgPoolManager_Call) Run(run func(org params.Organization)) *PoolManagerController_GetOrgPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Organization)) - }) - return _c -} - -func (_c *PoolManagerController_GetOrgPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetOrgPoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c 
*PoolManagerController_GetOrgPoolManager_Call) RunAndReturn(run func(params.Organization) (common.PoolManager, error)) *PoolManagerController_GetOrgPoolManager_Call { - _c.Call.Return(run) - return _c -} - // GetOrgPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -551,33 +283,6 @@ func (_m *PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolMan return r0, r1 } -// PoolManagerController_GetOrgPoolManagers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrgPoolManagers' -type PoolManagerController_GetOrgPoolManagers_Call struct { - *mock.Call -} - -// GetOrgPoolManagers is a helper method to define mock.On call -func (_e *PoolManagerController_Expecter) GetOrgPoolManagers() *PoolManagerController_GetOrgPoolManagers_Call { - return &PoolManagerController_GetOrgPoolManagers_Call{Call: _e.mock.On("GetOrgPoolManagers")} -} - -func (_c *PoolManagerController_GetOrgPoolManagers_Call) Run(run func()) *PoolManagerController_GetOrgPoolManagers_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManagerController_GetOrgPoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetOrgPoolManagers_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_GetOrgPoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetOrgPoolManagers_Call { - _c.Call.Return(run) - return _c -} - // GetRepoPoolManager provides a mock function with given fields: repo func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (common.PoolManager, error) { ret := _m.Called(repo) @@ -608,34 +313,6 @@ func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (com return r0, r1 } -// PoolManagerController_GetRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepoPoolManager' -type PoolManagerController_GetRepoPoolManager_Call struct { - *mock.Call -} - -// GetRepoPoolManager is a helper method to define mock.On call -// - repo params.Repository -func (_e *PoolManagerController_Expecter) GetRepoPoolManager(repo interface{}) *PoolManagerController_GetRepoPoolManager_Call { - return &PoolManagerController_GetRepoPoolManager_Call{Call: _e.mock.On("GetRepoPoolManager", repo)} -} - -func (_c *PoolManagerController_GetRepoPoolManager_Call) Run(run func(repo params.Repository)) *PoolManagerController_GetRepoPoolManager_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(params.Repository)) - }) - return _c -} - -func (_c *PoolManagerController_GetRepoPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetRepoPoolManager_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_GetRepoPoolManager_Call) RunAndReturn(run func(params.Repository) (common.PoolManager, error)) *PoolManagerController_GetRepoPoolManager_Call { - _c.Call.Return(run) - return _c -} - // GetRepoPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -666,33 +343,6 @@ func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolMa return r0, r1 } -// PoolManagerController_GetRepoPoolManagers_Call is a *mock.Call that shadows Run/Return 
methods with type explicit version for method 'GetRepoPoolManagers' -type PoolManagerController_GetRepoPoolManagers_Call struct { - *mock.Call -} - -// GetRepoPoolManagers is a helper method to define mock.On call -func (_e *PoolManagerController_Expecter) GetRepoPoolManagers() *PoolManagerController_GetRepoPoolManagers_Call { - return &PoolManagerController_GetRepoPoolManagers_Call{Call: _e.mock.On("GetRepoPoolManagers")} -} - -func (_c *PoolManagerController_GetRepoPoolManagers_Call) Run(run func()) *PoolManagerController_GetRepoPoolManagers_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *PoolManagerController_GetRepoPoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetRepoPoolManagers_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *PoolManagerController_GetRepoPoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetRepoPoolManagers_Call { - _c.Call.Return(run) - return _c -} - // NewPoolManagerController creates a new instance of PoolManagerController. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewPoolManagerController(t interface { diff --git a/runner/organizations.go b/runner/organizations.go index ffdd1c6c..39aa788b 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -16,11 +16,12 @@ package runner import ( "context" - "errors" "fmt" "log/slog" "strings" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -34,21 +35,10 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP } if err := param.Validate(); err != nil { - return params.Organization{}, fmt.Errorf("error validating params: %w", err) - } - - var creds params.ForgeCredentials - switch param.ForgeType { - case params.GithubEndpointType: - slog.DebugContext(ctx, "getting github credentials") - creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) - case params.GiteaEndpointType: - slog.DebugContext(ctx, "getting gitea credentials") - creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) - default: - creds, err = r.ResolveForgeCredentialByName(ctx, param.CredentialsName) + return params.Organization{}, errors.Wrap(err, "validating params") } + creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) if err != nil { return params.Organization{}, runnerErrors.NewBadRequestError("credentials %s not defined", param.CredentialsName) } @@ -56,15 +46,15 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP _, err = r.store.GetOrganization(ctx, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Organization{}, fmt.Errorf("error fetching org: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching org") } } else { return params.Organization{}, runnerErrors.NewConflictError("organization %s already exists", param.Name) } - org, err = r.store.CreateOrganization(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) + org, err = r.store.CreateOrganization(ctx, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Organization{}, fmt.Errorf("error creating organization: 
%w", err) + return params.Organization{}, errors.Wrap(err, "creating organization") } defer func() { @@ -81,7 +71,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP // updating the store. poolMgr, err := r.poolManagerCtrl.CreateOrgPoolManager(r.ctx, org, r.providers, r.store) if err != nil { - return params.Organization{}, fmt.Errorf("error creating org pool manager: %w", err) + return params.Organization{}, errors.Wrap(err, "creating org pool manager") } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteOrgPoolManager(org); deleteErr != nil { @@ -89,19 +79,19 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP ctx, "failed to cleanup pool manager for org", "org_id", org.ID) } - return params.Organization{}, fmt.Errorf("error starting org pool manager: %w", err) + return params.Organization{}, errors.Wrap(err, "starting org pool manager") } return org, nil } -func (r *Runner) ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { +func (r *Runner) ListOrganizations(ctx context.Context) ([]params.Organization, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - orgs, err := r.store.ListOrganizations(ctx, filter) + orgs, err := r.store.ListOrganizations(ctx) if err != nil { - return nil, fmt.Errorf("error listing organizations: %w", err) + return nil, errors.Wrap(err, "listing organizations") } var allOrgs []params.Organization @@ -128,7 +118,7 @@ func (r *Runner) GetOrganizationByID(ctx context.Context, orgID string) (params. org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.Organization{}, fmt.Errorf("error fetching organization: %w", err) + return params.Organization{}, errors.Wrap(err, "fetching organization") } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) @@ -147,17 +137,17 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return fmt.Errorf("error fetching org: %w", err) + return errors.Wrap(err, "fetching org") } entity, err := org.GetEntity() if err != nil { - return fmt.Errorf("error getting entity: %w", err) + return errors.Wrap(err, "getting entity") } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return fmt.Errorf("error fetching org pools: %w", err) + return errors.Wrap(err, "fetching org pools") } if len(pools) > 0 { @@ -169,19 +159,10 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho return runnerErrors.NewBadRequestError("org has pools defined (%s)", strings.Join(poolIDs, ", ")) } - scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) - if err != nil { - return fmt.Errorf("error fetching organization scale sets: %w", err) - } - - if len(scaleSets) > 0 { - return runnerErrors.NewBadRequestError("organization has scale sets defined; delete them first") - } - if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return fmt.Errorf("error fetching pool manager: %w", err) + return errors.Wrap(err, "fetching pool manager") } if err := poolMgr.UninstallWebhook(ctx); err != nil { @@ -194,11 +175,11 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho } if err := r.poolManagerCtrl.DeleteOrgPoolManager(org); err != nil { - return fmt.Errorf("error deleting org pool manager: %w", err) + return 
errors.Wrap(err, "deleting org pool manager") } if err := r.store.DeleteOrganization(ctx, orgID); err != nil { - return fmt.Errorf("error removing organization %s: %w", orgID, err) + return errors.Wrapf(err, "removing organization %s", orgID) } return nil } @@ -219,7 +200,7 @@ func (r *Runner) UpdateOrganization(ctx context.Context, orgID string, param par org, err := r.store.UpdateOrganization(ctx, orgID, param) if err != nil { - return params.Organization{}, fmt.Errorf("error updating org: %w", err) + return params.Organization{}, errors.Wrap(err, "updating org") } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) @@ -238,21 +219,21 @@ func (r *Runner) CreateOrgPool(ctx context.Context, orgID string, param params.C createPoolParams, err := r.appendTagsToCreatePoolParams(param) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool params: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool params") } if param.RunnerBootstrapTimeout == 0 { param.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) if err != nil { - return params.Pool{}, fmt.Errorf("error creating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "creating pool") } return pool, nil @@ -263,14 +244,14 @@ func (r *Runner) GetOrgPoolByID(ctx context.Context, orgID, poolID string) (para return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } return pool, nil @@ -281,15 +262,15 @@ func (r *Runner) DeleteOrgPool(ctx context.Context, orgID, poolID string) error return runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } return nil } @@ -305,7 +286,7 @@ func (r *Runner) DeleteOrgPool(ctx context.Context, orgID, poolID string) error } if err := r.store.DeleteEntityPool(ctx, entity, poolID); err != nil { - return fmt.Errorf("error deleting pool: %w", err) + return errors.Wrap(err, "deleting pool") } return nil } @@ -314,13 +295,13 @@ func (r *Runner) ListOrgPools(ctx context.Context, orgID string) ([]params.Pool, if !auth.IsAdmin(ctx) { return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, fmt.Errorf("error fetching pools: %w", err) + return nil, errors.Wrap(err, "fetching pools") } return pools, nil } @@ -330,14 +311,14 @@ func (r *Runner) UpdateOrgPool(ctx context.Context, orgID, poolID string, param return params.Pool{}, 
runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } maxRunners := pool.MaxRunners @@ -356,7 +337,7 @@ func (r *Runner) UpdateOrgPool(ctx context.Context, orgID, poolID string, param newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, fmt.Errorf("error updating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "updating pool") } return newPool, nil } @@ -366,14 +347,14 @@ func (r *Runner) ListOrgInstances(ctx context.Context, orgID string) ([]params.I return nil, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: orgID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) + return []params.Instance{}, errors.Wrap(err, "fetching instances") } return instances, nil } @@ -384,12 +365,12 @@ func (r *Runner) findOrgPoolManager(name, endpointName string) (common.PoolManag org, err := r.store.GetOrganization(r.ctx, name, endpointName) if err != nil { - return nil, fmt.Errorf("error fetching org: %w", err) + return nil, errors.Wrap(err, "fetching org") } poolManager, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return nil, fmt.Errorf("error fetching pool manager for org: %w", err) + return nil, errors.Wrap(err, "fetching pool manager for org") } return poolManager, nil } @@ -401,17 +382,17 @@ func (r *Runner) InstallOrgWebhook(ctx context.Context, orgID string, param para org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching org: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching org") } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching pool manager for org: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for org") } info, err := poolMgr.InstallWebhook(ctx, param) if err != nil { - return params.HookInfo{}, fmt.Errorf("error installing webhook: %w", err) + return params.HookInfo{}, errors.Wrap(err, "installing webhook") } return info, nil } @@ -423,16 +404,16 @@ func (r *Runner) UninstallOrgWebhook(ctx context.Context, orgID string) error { org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return fmt.Errorf("error fetching org: %w", err) + return errors.Wrap(err, "fetching org") } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return fmt.Errorf("error fetching pool manager for org: %w", err) + return errors.Wrap(err, "fetching pool manager for org") } if err := poolMgr.UninstallWebhook(ctx); err != nil { - return fmt.Errorf("error uninstalling webhook: %w", err) + return errors.Wrap(err, "uninstalling webhook") } return nil } @@ -444,17 +425,17 @@ func (r *Runner) GetOrgWebhookInfo(ctx context.Context, orgID string) (params.Ho org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching org: %w", err) + return 
params.HookInfo{}, errors.Wrap(err, "fetching org") } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching pool manager for org: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for org") } info, err := poolMgr.GetWebhookInfo(ctx) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching webhook info: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching webhook info") } return info, nil } diff --git a/runner/organizations_test.go b/runner/organizations_test.go index 8d2aa3f6..ae0af3cf 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -16,10 +16,10 @@ package runner import ( "context" - "errors" "fmt" "testing" + "github.com/pkg/errors" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -39,7 +39,7 @@ type OrgTestFixtures struct { Store dbCommon.Store StoreOrgs map[string]params.Organization Providers map[string]common.Provider - Credentials map[string]params.ForgeCredentials + Credentials map[string]params.GithubCredentials CreateOrgParams params.CreateOrgParams CreatePoolParams params.CreatePoolParams CreateInstanceParams params.CreateInstanceParams @@ -56,11 +56,9 @@ type OrgTestSuite struct { Fixtures *OrgTestFixtures Runner *Runner - testCreds params.ForgeCredentials - secondaryTestCreds params.ForgeCredentials - giteaTestCreds params.ForgeCredentials - githubEndpoint params.ForgeEndpoint - giteaEndpoint params.ForgeEndpoint + testCreds params.GithubCredentials + secondaryTestCreds params.GithubCredentials + githubEndpoint params.GithubEndpoint } func (s *OrgTestSuite) SetupTest() { @@ -74,9 +72,7 @@ func (s *OrgTestSuite) SetupTest() { adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) - s.giteaTestCreds = garmTesting.CreateTestGiteaCredentials(adminCtx, "gitea-creds", db, s.T(), s.giteaEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some organization objects in the database, for testing purposes @@ -86,7 +82,7 @@ func (s *OrgTestSuite) SetupTest() { org, err := db.CreateOrganization( adminCtx, name, - s.testCreds, + s.testCreds.Name, fmt.Sprintf("test-webhook-secret-%v", i), params.PoolBalancerTypeRoundRobin, ) @@ -108,7 +104,7 @@ func (s *OrgTestSuite) SetupTest() { Providers: map[string]common.Provider{ "test-provider": providerMock, }, - Credentials: map[string]params.ForgeCredentials{ + Credentials: map[string]params.GithubCredentials{ s.testCreds.Name: s.testCreds, s.secondaryTestCreds.Name: s.secondaryTestCreds, }, @@ -224,7 +220,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error creating org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("creating org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationStartPoolMgrFailed() { @@ -236,80 +232,20 @@ func (s *OrgTestSuite) TestCreateOrganizationStartPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) 
s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error starting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("starting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestListOrganizations() { s.Fixtures.PoolMgrCtrlMock.On("GetOrgPoolManager", mock.AnythingOfType("params.Organization")).Return(s.Fixtures.PoolMgrMock, nil) s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - orgs, err := s.Runner.ListOrganizations(s.Fixtures.AdminContext, params.OrganizationFilter{}) + orgs, err := s.Runner.ListOrganizations(s.Fixtures.AdminContext) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreOrgs), orgs) } -func (s *OrgTestSuite) TestListOrganizationsWithFilter() { - s.Fixtures.PoolMgrCtrlMock.On("GetOrgPoolManager", mock.AnythingOfType("params.Organization")).Return(s.Fixtures.PoolMgrMock, nil) - s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - - org, err := s.Fixtures.Store.CreateOrganization( - s.Fixtures.AdminContext, - "test-org", - s.testCreds, - "super-secret", - params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - - org2, err := s.Fixtures.Store.CreateOrganization( - s.Fixtures.AdminContext, - "test-org", - s.giteaTestCreds, - "super-secret", - params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - - org3, err := s.Fixtures.Store.CreateOrganization( - s.Fixtures.AdminContext, - "test-org2", - s.giteaTestCreds, - "super-secret", - params.PoolBalancerTypeRoundRobin) - s.Require().NoError(err) - - orgs, err := s.Runner.ListOrganizations( - s.Fixtures.AdminContext, - params.OrganizationFilter{ - Name: "test-org", - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org, org2}, orgs) - - orgs, err = s.Runner.ListOrganizations( - s.Fixtures.AdminContext, - params.OrganizationFilter{ - Name: "test-org", - Endpoint: s.giteaEndpoint.Name, - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org2}, orgs) - - orgs, err = s.Runner.ListOrganizations( - s.Fixtures.AdminContext, - params.OrganizationFilter{ - Name: "test-org2", - }, - ) - - s.Require().Nil(err) - garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org3}, orgs) -} - func (s *OrgTestSuite) TestListOrganizationsErrUnauthorized() { - _, err := s.Runner.ListOrganizations(context.Background(), params.OrganizationFilter{}) + _, err := s.Runner.ListOrganizations(context.Background()) s.Require().Equal(runnerErrors.ErrUnauthorized, err) } @@ -338,7 +274,7 @@ func (s *OrgTestSuite) TestDeleteOrganization() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetOrganizationByID(s.Fixtures.AdminContext, s.Fixtures.StoreOrgs["test-org-3"].ID) - s.Require().Equal("error fetching org: not found", err.Error()) + s.Require().Equal("fetching org: not found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationErrUnauthorized() { @@ -348,9 +284,9 @@ func (s *OrgTestSuite) TestDeleteOrganizationErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrganizationPoolDefinedFailed() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, 
s.Fixtures.CreatePoolParams) if err != nil { @@ -368,7 +304,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPoolMgrFailed() { err := s.Runner.DeleteOrganization(s.Fixtures.AdminContext, s.Fixtures.StoreOrgs["test-org-1"].ID, true) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("error deleting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("deleting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestUpdateOrganization() { @@ -466,9 +402,9 @@ func (s *OrgTestSuite) TestCreateOrgPoolFetchPoolParamsFailed() { } func (s *OrgTestSuite) TestGetOrgPoolByID() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } orgPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -488,9 +424,9 @@ func (s *OrgTestSuite) TestGetOrgPoolByIDErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrgPool() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -502,7 +438,7 @@ func (s *OrgTestSuite) TestDeleteOrgPool() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID) - s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: finding pool: not found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrgPoolErrUnauthorized() { @@ -512,9 +448,9 @@ func (s *OrgTestSuite) TestDeleteOrgPoolErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrgPoolRunnersFailed() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -531,9 +467,9 @@ func (s *OrgTestSuite) TestDeleteOrgPoolRunnersFailed() { } func (s *OrgTestSuite) TestListOrgPools() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } orgPools := []params.Pool{} for i := 1; i <= 2; i++ { @@ -558,9 +494,9 @@ func (s *OrgTestSuite) TestListOrgPoolsErrUnauthorized() { } func (s *OrgTestSuite) TestUpdateOrgPool() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } orgPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -581,9 +517,9 @@ func (s *OrgTestSuite) TestUpdateOrgPoolErrUnauthorized() { } func (s *OrgTestSuite) TestUpdateOrgPoolMinIdleGreaterThanMax() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: 
params.GithubEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -600,9 +536,9 @@ func (s *OrgTestSuite) TestUpdateOrgPoolMinIdleGreaterThanMax() { } func (s *OrgTestSuite) TestListOrgInstances() { - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { diff --git a/runner/pool/cache.go b/runner/pool/cache.go deleted file mode 100644 index 5a3a3c8c..00000000 --- a/runner/pool/cache.go +++ /dev/null @@ -1,75 +0,0 @@ -package pool - -import ( - "sort" - "strings" - "sync" - "sync/atomic" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -type poolCacheStore interface { - Next() (params.Pool, error) - Reset() - Len() int -} - -type poolRoundRobin struct { - pools []params.Pool - next uint32 -} - -func (p *poolRoundRobin) Next() (params.Pool, error) { - if len(p.pools) == 0 { - return params.Pool{}, runnerErrors.ErrNoPoolsAvailable - } - - n := atomic.AddUint32(&p.next, 1) - return p.pools[(int(n)-1)%len(p.pools)], nil -} - -func (p *poolRoundRobin) Len() int { - return len(p.pools) -} - -func (p *poolRoundRobin) Reset() { - atomic.StoreUint32(&p.next, 0) -} - -type poolsForTags struct { - pools sync.Map - poolCacheType params.PoolBalancerType -} - -func (p *poolsForTags) Get(tags []string) (poolCacheStore, bool) { - sort.Strings(tags) - key := strings.Join(tags, "^") - - v, ok := p.pools.Load(key) - if !ok { - return nil, false - } - poolCache := v.(*poolRoundRobin) - if p.poolCacheType == params.PoolBalancerTypePack { - // When we service a list of jobs, we want to try each pool in turn - // for each job. Pools are sorted by priority so we always start from the - // highest priority pool and move on to the next if the first one is full. - poolCache.Reset() - } - return poolCache, true -} - -func (p *poolsForTags) Add(tags []string, pools []params.Pool) poolCacheStore { - sort.Slice(pools, func(i, j int) bool { - return pools[i].Priority > pools[j].Priority - }) - - sort.Strings(tags) - key := strings.Join(tags, "^") - - poolRR := &poolRoundRobin{pools: pools} - v, _ := p.pools.LoadOrStore(key, poolRR) - return v.(*poolRoundRobin) -} diff --git a/runner/pool/common.go b/runner/pool/common.go index a41e034d..29c86e27 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -1,28 +1,78 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
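Note on the cache.go removal above: the deleted poolRoundRobin distributed jobs across matching pools using an atomic counter. For reference, a minimal, self-contained sketch of that selection scheme (the type and names here are illustrative, not part of GARM's API):

package main

import (
	"fmt"
	"sync/atomic"
)

// roundRobin cycles over a fixed slice. Next is safe for concurrent use:
// AddUint32 hands each caller a unique counter value, which is mapped onto
// an index. Like the deleted poolRoundRobin, the counter can be reset so
// that "pack" balancing always retries from the first (highest priority)
// entry.
type roundRobin struct {
	items []string
	next  uint32
}

func (r *roundRobin) Next() string {
	n := atomic.AddUint32(&r.next, 1)
	return r.items[(int(n)-1)%len(r.items)]
}

func (r *roundRobin) Reset() {
	atomic.StoreUint32(&r.next, 0)
}

func main() {
	rr := &roundRobin{items: []string{"pool-a", "pool-b", "pool-c"}}
	for i := 0; i < 4; i++ {
		fmt.Print(rr.Next(), " ") // pool-a pool-b pool-c pool-a
	}
	fmt.Println()
}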
- package pool -type RunnerLabels struct { - ID int64 `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Type string `json:"type,omitempty"` +import ( + "context" + "net/http" + "net/url" + "strings" + + "github.com/google/go-github/v72/github" + "github.com/pkg/errors" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, req *github.Hook) error { + parsed, err := url.Parse(baseURL) + if err != nil { + return errors.Wrap(err, "parsing webhook url") + } + + partialMatches := []string{} + for _, hook := range allHooks { + hookURL := strings.ToLower(hook.GetURL()) + + if hook.GetURL() == req.GetURL() { + return runnerErrors.NewConflictError("hook already installed") + } else if strings.Contains(hookURL, controllerID) || strings.Contains(hookURL, parsed.Hostname()) { + partialMatches = append(partialMatches, hook.GetURL()) + } + } + + if len(partialMatches) > 0 { + return runnerErrors.NewConflictError("a webhook containing the controller ID or hostname of this controller is already installed on this repository") + } + + return nil } -type forgeRunner struct { - ID int64 `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Status string `json:"status,omitempty"` - Labels []RunnerLabels `json:"labels,omitempty"` +func hookToParamsHookInfo(hook *github.Hook) params.HookInfo { + var insecureSSL bool + var hookURL string + config := hook.GetConfig() + if config != nil { + insecureSSL = config.GetInsecureSSL() == "1" + hookURL = config.GetURL() + } + + return params.HookInfo{ + ID: *hook.ID, + URL: hookURL, + Events: hook.Events, + Active: *hook.Active, + InsecureSSL: insecureSSL, + } +} + +func (r *basePoolManager) listHooks(ctx context.Context) ([]*github.Hook, error) { + opts := github.ListOptions{ + PerPage: 100, + } + var allHooks []*github.Hook + for { + hooks, ghResp, err := r.ghcli.ListEntityHooks(ctx, &opts) + if err != nil { + if ghResp != nil && ghResp.StatusCode == http.StatusNotFound { + return nil, runnerErrors.NewBadRequestError("repository not found or your PAT does not have access to manage webhooks") + } + return nil, errors.Wrap(err, "fetching hooks") + } + allHooks = append(allHooks, hooks...) + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + return allHooks, nil } diff --git a/locking/local_backoff_locker.go b/runner/pool/locking.go similarity index 64% rename from locking/local_backoff_locker.go rename to runner/pool/locking.go index 93344566..7e20d990 100644 --- a/locking/local_backoff_locker.go +++ b/runner/pool/locking.go @@ -1,29 +1,40 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License.
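The listHooks helper added above follows the usual go-github pagination idiom. For reference, a standalone sketch of the same loop against the public Repositories API (the owner/repo values and the unauthenticated client are placeholders):

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v72/github"
)

// listAllHooks pages through every webhook on a repository, 100 at a time,
// following resp.NextPage until the API reports no further pages.
func listAllHooks(ctx context.Context, cli *github.Client, owner, repo string) ([]*github.Hook, error) {
	opts := &github.ListOptions{PerPage: 100}
	var all []*github.Hook
	for {
		hooks, resp, err := cli.Repositories.ListHooks(ctx, owner, repo, opts)
		if err != nil {
			return nil, fmt.Errorf("listing hooks: %w", err)
		}
		all = append(all, hooks...)
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
	return all, nil
}

func main() {
	hooks, err := listAllHooks(context.Background(), github.NewClient(nil), "cloudbase", "garm")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("hooks:", len(hooks))
}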
- -package locking +package pool import ( - "context" "sync" "time" "github.com/cloudbase/garm/runner/common" ) -func NewInstanceDeleteBackoff(_ context.Context) (InstanceDeleteBackoff, error) { - return &instanceDeleteBackoff{}, nil +const ( + maxBackoffSeconds float64 = 1200 // 20 minutes +) + +type keyMutex struct { + muxes sync.Map +} + +func (k *keyMutex) TryLock(key string) bool { + mux, _ := k.muxes.LoadOrStore(key, &sync.Mutex{}) + keyMux := mux.(*sync.Mutex) + return keyMux.TryLock() +} + +func (k *keyMutex) Unlock(key string, remove bool) { + mux, ok := k.muxes.Load(key) + if !ok { + return + } + keyMux := mux.(*sync.Mutex) + if remove { + k.Delete(key) + } + keyMux.Unlock() +} + +func (k *keyMutex) Delete(key string) { + k.muxes.Delete(key) } type instanceBackOff struct { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index eecb500a..0b9f3675 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -17,7 +17,6 @@ package pool import ( "context" "crypto/rand" - "errors" "fmt" "log/slog" "math" @@ -30,33 +29,30 @@ import ( "github.com/google/go-github/v72/github" "github.com/google/uuid" + "github.com/pkg/errors" "golang.org/x/sync/errgroup" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm-provider-common/util" "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" - ghClient "github.com/cloudbase/garm/util/github" - "github.com/cloudbase/garm/util/github/scalesets" ) var ( - poolIDLabelprefix = "runner-pool-id" - controllerLabelPrefix = "runner-controller-id" + poolIDLabelprefix = "runner-pool-id:" + controllerLabelPrefix = "runner-controller-id:" // We tag runners that have been spawned as a result of a queued job with the job ID // that spawned them. There is no way to guarantee that the runner spawned in response to a particular // job will be picked up by that job. We mark them so that, in the very likely event that the runner // has picked up a different job, we can clear the lock on the job that spawned it. // The job it picked up would already be transitioned to in_progress so it will be ignored by the // consume loop.
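Picking up the label comment above (the jobLabelPrefix definition continues just below): with the ":" delimiter now baked into each prefix, the job ID can be recovered by slicing off len(prefix) bytes. A small runnable sketch of that parse, mirroring the jobIDFromLabels helper later in this file (label values are made up):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

const jobLabelPrefix = "in_response_to_job:"

// jobIDFromLabels scans runner labels for the job-ID tag and parses the
// numeric suffix. Because the delimiter is part of the prefix, slicing at
// len(jobLabelPrefix) lands exactly on the first digit.
func jobIDFromLabels(labels []string) int64 {
	for _, lbl := range labels {
		if strings.HasPrefix(lbl, jobLabelPrefix) {
			jobID, err := strconv.ParseInt(lbl[len(jobLabelPrefix):], 10, 64)
			if err != nil {
				return 0
			}
			return jobID
		}
	}
	return 0
}

func main() {
	labels := []string{"runner-pool-id:1234", "in_response_to_job:42"}
	fmt.Println(jobIDFromLabels(labels)) // 42
}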
- jobLabelPrefix = "in_response_to_job" + jobLabelPrefix = "in_response_to_job:" ) const ( @@ -68,25 +64,20 @@ const ( maxCreateAttempts = 5 ) -func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { - ctx = garmUtil.WithSlogContext( - ctx, - slog.Any("pool_mgr", entity.String()), - slog.Any("endpoint", entity.Credentials.Endpoint.Name), - slog.Any("pool_type", entity.EntityType), - ) - ghc, err := ghClient.Client(ctx, entity) +func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { + ctx = garmUtil.WithContext(ctx, slog.Any("pool_mgr", entity.String()), slog.Any("pool_type", entity.EntityType)) + ghc, err := garmUtil.GithubClient(ctx, entity, entity.Credentials) if err != nil { - return nil, fmt.Errorf("error getting github client: %w", err) + return nil, errors.Wrap(err, "getting github client") } if entity.WebhookSecret == "" { - return nil, fmt.Errorf("webhook secret is empty") + return nil, errors.New("webhook secret is empty") } controllerInfo, err := store.ControllerInfo() if err != nil { - return nil, fmt.Errorf("error getting controller info: %w", err) + return nil, errors.Wrap(err, "getting controller info") } consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint.Name) @@ -96,28 +87,17 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan composeWatcherFilters(entity), ) if err != nil { - return nil, fmt.Errorf("error registering consumer: %w", err) + return nil, errors.Wrap(err, "registering consumer") } wg := &sync.WaitGroup{} - backoff, err := locking.NewInstanceDeleteBackoff(ctx) - if err != nil { - return nil, fmt.Errorf("error creating backoff: %w", err) - } + keyMuxes := &keyMutex{} + backoff := &instanceDeleteBackoff{} - var scaleSetCli *scalesets.ScaleSetClient - if entity.Credentials.ForgeType == params.GithubEndpointType { - scaleSetCli, err = scalesets.NewClient(ghc) - if err != nil { - return nil, fmt.Errorf("failed to get scalesets client: %w", err) - } - } repo := &basePoolManager{ ctx: ctx, - consumerID: consumerID, entity: entity, ghcli: ghc, - scaleSetClient: scaleSetCli, controllerInfo: controllerInfo, instanceTokenGetter: instanceTokenGetter, @@ -125,6 +105,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan providers: providers, quit: make(chan struct{}), wg: wg, + keyMux: keyMuxes, backoff: backoff, consumer: consumer, } @@ -133,10 +114,8 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan type basePoolManager struct { ctx context.Context - consumerID string - entity params.ForgeEntity + entity params.GithubEntity ghcli common.GithubClient - scaleSetClient *scalesets.ScaleSetClient controllerInfo params.ControllerInfo instanceTokenGetter auth.InstanceTokenGetter consumer dbCommon.Consumer @@ -152,7 +131,8 @@ type basePoolManager struct { mux sync.Mutex wg *sync.WaitGroup - backoff locking.InstanceDeleteBackoff + keyMux *keyMutex + backoff *instanceDeleteBackoff } func (r *basePoolManager) getProviderBaseParams(pool params.Pool) common.ProviderBaseParams { @@ -167,8 +147,7 @@ func (r *basePoolManager) getProviderBaseParams(pool params.Pool) common.Provide func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) 
error { if err := r.ValidateOwner(job); err != nil { - slog.ErrorContext(r.ctx, "failed to validate owner", "error", err) - return fmt.Errorf("error validating owner: %w", err) + return errors.Wrap(err, "validating owner") } // we see events where the labels seem to be missing. We should ignore these @@ -178,31 +157,34 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return nil } - jobParams, err := r.paramsWorkflowJobToParamsJob(job) - if err != nil { - slog.ErrorContext(r.ctx, "failed to convert job to params", "error", err) - return fmt.Errorf("error converting job to params: %w", err) - } - + var jobParams params.Job + var err error var triggeredBy int64 defer func() { - if jobParams.WorkflowJobID == 0 { - return - } // we're updating the job in the database, regardless of whether it was successful or not, // or whether it was meant for this pool or not. Github will send the same job data to all hierarchies // that have been configured to work with garm. Updating the job at all levels should yield the same // outcome in the db. - _, err := r.store.GetJobByID(r.ctx, jobParams.WorkflowJobID) + if jobParams.ID == 0 { + return + } + + _, err := r.store.GetJobByID(r.ctx, jobParams.ID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to get job", - "job_id", jobParams.WorkflowJobID) + "job_id", jobParams.ID) return } // This job is new to us. Check if we have a pool that can handle it. - potentialPools := cache.FindPoolsMatchingAllTags(r.entity.ID, jobParams.Labels) + potentialPools, err := r.store.FindPoolsMatchingAllTags(r.ctx, r.entity.EntityType, r.entity.ID, jobParams.Labels) + if err != nil { + slog.With(slog.Any("error", err)).WarnContext( + r.ctx, "failed to find pools matching tags; not recording job", + "requested_tags", strings.Join(jobParams.Labels, ", ")) + return + } if len(potentialPools) == 0 { slog.WarnContext( r.ctx, "no pools matching tags; not recording job", @@ -213,10 +195,10 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { if _, jobErr := r.store.CreateOrUpdateJob(r.ctx, jobParams); jobErr != nil { slog.With(slog.Any("error", jobErr)).ErrorContext( - r.ctx, "failed to update job", "job_id", jobParams.WorkflowJobID) + r.ctx, "failed to update job", "job_id", jobParams.ID) } - if triggeredBy != 0 && jobParams.WorkflowJobID != triggeredBy { + if triggeredBy != 0 && jobParams.ID != triggeredBy { // The triggeredBy value is only set by the "in_progress" webhook. The runner that // transitioned to in_progress was created as a result of a different queued job. If that job is // still queued and we don't remove the lock, it will linger until the lock timeout is reached. @@ -233,7 +215,22 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { case "queued": // Record the job in the database. Queued jobs will be picked up by the consumeQueuedJobs() method // when reconciling. + jobParams, err = r.paramsWorkflowJobToParamsJob(job) + if err != nil { + return errors.Wrap(err, "converting job to params") + } case "completed": + jobParams, err = r.paramsWorkflowJobToParamsJob(job) + if err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + // Unassigned jobs will have an empty runner_name. + // We also need to ignore not found errors, as we may get a webhook regarding + // a workflow that is handled by a runner at a different hierarchy level.
+ return nil + } + return errors.Wrap(err, "converting job to params") + } + // If job was not assigned to a runner, we can ignore it. if jobParams.RunnerName == "" { slog.InfoContext( @@ -241,16 +238,6 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return nil } - fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) - if !ok { - return nil - } - - if _, ok := cache.GetEntityPool(r.entity.ID, fromCache.PoolID); !ok { - slog.DebugContext(r.ctx, "instance belongs to a pool not managed by this entity", "pool_id", fromCache.PoolID) - return nil - } - // update instance workload state. if _, err := r.setInstanceRunnerStatus(jobParams.RunnerName, params.RunnerTerminated); err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { @@ -259,7 +246,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } slog.DebugContext( r.ctx, "marking instance as pending_delete", @@ -271,20 +258,22 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } case "in_progress": - fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) - if !ok { - slog.DebugContext(r.ctx, "instance not found in cache", "runner_name", jobParams.RunnerName) - return nil + jobParams, err = r.paramsWorkflowJobToParamsJob(job) + if err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + // This is most likely a runner we're not managing. If we define a repo from within an org + // and also define that same org, we will get a hook from github from both the repo and the org + // regarding the same workflow. We look for the runner in the database, and make sure it exists and is + // part of a pool that this manager is responsible for. A not found error here will most likely mean + // that we are not responsible for that runner, and we should ignore it. + return nil + } + return errors.Wrap(err, "converting job to params") } - pool, ok := cache.GetEntityPool(r.entity.ID, fromCache.PoolID) - if !ok { - slog.DebugContext(r.ctx, "instance belongs to a pool not managed by this entity", "pool_id", fromCache.PoolID) - return nil - } // update instance workload state. instance, err := r.setInstanceRunnerStatus(jobParams.RunnerName, params.RunnerActive) if err != nil { @@ -294,13 +283,17 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } // Set triggeredBy here so we break the lock on any potential queued job. triggeredBy = jobIDFromLabels(instance.AditionalLabels) // A runner has picked up the job, and is now running it. It may need to be replaced if the pool has // a minimum number of idle runners configured. 
+ pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) + if err != nil { + return errors.Wrap(err, "getting pool") + } if err := r.ensureIdleRunnersForOnePool(pool); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "error ensuring idle runners for pool", @@ -313,8 +306,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { func jobIDFromLabels(labels []string) int64 { for _, lbl := range labels { if strings.HasPrefix(lbl, jobLabelPrefix) { - trimLength := min(len(jobLabelPrefix)+1, len(lbl)) - jobID, err := strconv.ParseInt(lbl[trimLength:], 10, 64) + jobID, err := strconv.ParseInt(lbl[len(jobLabelPrefix):], 10, 64) if err != nil { return 0 } @@ -353,7 +345,7 @@ func (r *basePoolManager) startLoopForFunction(f func() error, interval time.Dur r.ctx, "error in loop", "loop_name", name) if errors.Is(err, runnerErrors.ErrUnauthorized) { - r.SetPoolRunningState(false, err.Error()) + r.setPoolRunningState(false, err.Error()) } } case <-r.ctx.Done(): @@ -379,21 +371,21 @@ func (r *basePoolManager) startLoopForFunction(f func() error, interval time.Dur } func (r *basePoolManager) updateTools() error { - tools, err := cache.GetGithubToolsCache(r.entity.ID) + // Update tools cache. + tools, err := r.FetchTools() if err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update tools for entity", "entity", r.entity.String()) - r.SetPoolRunningState(false, err.Error()) + r.setPoolRunningState(false, err.Error()) return fmt.Errorf("failed to update tools for entity %s: %w", r.entity.String(), err) } - r.mux.Lock() r.tools = tools r.mux.Unlock() slog.DebugContext(r.ctx, "successfully updated tools") - r.SetPoolRunningState(true, "") - return nil + r.setPoolRunningState(true, "") + return err } // cleanupOrphanedProviderRunners compares runners in github with local runners and removes @@ -403,10 +395,10 @@ func (r *basePoolManager) updateTools() error { // happens, github will remove the ephemeral worker and send a webhook our way. // If we were offline and did not process the webhook, the instance will linger. // We need to remove it from the provider and database. -func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []forgeRunner) error { +func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runner) error { dbInstances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { - return fmt.Errorf("error fetching instances from db: %w", err) + return errors.Wrap(err, "fetching instances from db") } runnerNames := map[string]bool{} @@ -414,26 +406,21 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []forgeRunner) if !isManagedRunner(labelsFromRunner(run), r.controllerInfo.ControllerID.String()) { slog.DebugContext( r.ctx, "runner is not managed by a pool we manage", - "runner_name", run.Name) + "runner_name", run.GetName()) continue } - runnerNames[run.Name] = true + runnerNames[*run.Name] = true } for _, instance := range dbInstances { - if instance.ScaleSetID != 0 { - // ignore scale set instances. 
- continue - } - - lockAcquired := locking.TryLock(instance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instance.Name) if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", "runner_name", instance.Name) continue } - defer locking.Unlock(instance.Name, false) + defer r.keyMux.Unlock(instance.Name, false) switch instance.Status { case commonParams.InstancePendingCreate, @@ -443,9 +430,10 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []forgeRunner) // github so we let them be for now. continue } + pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return fmt.Errorf("error fetching instance pool info: %w", err) + return errors.Wrap(err, "fetching instance pool info") } switch instance.RunnerStatus { @@ -473,7 +461,7 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []forgeRunner) slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner", "runner_name", instance.Name) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } } } @@ -483,44 +471,39 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []forgeRunner) // reapTimedOutRunners will mark as pending_delete any runner that has a status // of "running" in the provider, but that has not registered with Github, and has // received no new updates in the configured timeout interval. -func (r *basePoolManager) reapTimedOutRunners(runners []forgeRunner) error { +func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { dbInstances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { - return fmt.Errorf("error fetching instances from db: %w", err) + return errors.Wrap(err, "fetching instances from db") } - runnersByName := map[string]forgeRunner{} + runnersByName := map[string]*github.Runner{} for _, run := range runners { if !isManagedRunner(labelsFromRunner(run), r.controllerInfo.ControllerID.String()) { slog.DebugContext( r.ctx, "runner is not managed by a pool we manage", - "runner_name", run.Name) + "runner_name", run.GetName()) continue } - runnersByName[run.Name] = run + runnersByName[*run.Name] = run } for _, instance := range dbInstances { - if instance.ScaleSetID != 0 { - // ignore scale set instances. - continue - } - slog.DebugContext( r.ctx, "attempting to lock instance", "runner_name", instance.Name) - lockAcquired := locking.TryLock(instance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instance.Name) if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", "runner_name", instance.Name) continue } - defer locking.Unlock(instance.Name, false) + defer r.keyMux.Unlock(instance.Name, false) pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return fmt.Errorf("error fetching instance pool info: %w", err) + return errors.Wrap(err, "fetching instance pool info") } if time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { continue @@ -531,7 +514,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []forgeRunner) error { // * The runner managed to join github, but the setup process failed later and the runner // never started on the instance. // * A JIT config was created, but the runner never joined github. 
- if runner, ok := runnersByName[instance.Name]; !ok || runner.Status == "offline" { + if runner, ok := runnersByName[instance.Name]; !ok || runner.GetStatus() == "offline" { slog.InfoContext( r.ctx, "reaping timed-out/failed runner", "runner_name", instance.Name) @@ -539,7 +522,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []forgeRunner) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", instance.Name) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } } } @@ -550,46 +533,43 @@ func (r *basePoolManager) reapTimedOutRunners(runners []forgeRunner) error { // as offline and for which we no longer have a local instance. // This may happen if someone manually deletes the instance in the provider. We need to // first remove the instance from github, and then from our database. -func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) error { +func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) error { poolInstanceCache := map[string][]commonParams.ProviderInstance{} g, ctx := errgroup.WithContext(r.ctx) for _, runner := range runners { if !isManagedRunner(labelsFromRunner(runner), r.controllerInfo.ControllerID.String()) { slog.DebugContext( r.ctx, "runner is not managed by a pool we manage", - "runner_name", runner.Name) + "runner_name", runner.GetName()) continue } - status := runner.Status + status := runner.GetStatus() if status != "offline" { // Runner is online. Ignore it. continue } - dbInstance, err := r.store.GetInstance(r.ctx, runner.Name) + dbInstance, err := r.store.GetInstanceByName(r.ctx, *runner.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("error fetching instance from DB: %w", err) + return errors.Wrap(err, "fetching instance from DB") } // We no longer have a DB entry for this instance, and the runner appears offline in github. // Previous forceful removal may have failed? slog.InfoContext( r.ctx, "Runner has no database entry in garm, removing from github", - "runner_name", runner.Name) - if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.ID); err != nil { + "runner_name", runner.GetName()) + resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) + if err != nil { // Removed in the meantime? - if errors.Is(err, runnerErrors.ErrNotFound) { + if resp != nil && resp.StatusCode == http.StatusNotFound { continue } - return fmt.Errorf("error removing runner: %w", err) + return errors.Wrap(err, "removing runner") } continue } - if dbInstance.ScaleSetID != 0 { - // ignore scale set instances. - continue - } switch dbInstance.Status { case commonParams.InstancePendingDelete, commonParams.InstanceDeleting: @@ -616,17 +596,17 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er pool, err := r.store.GetEntityPool(r.ctx, r.entity, dbInstance.PoolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } // check if the provider still has the instance. 
- provider, ok := r.providers[dbInstance.ProviderName] + provider, ok := r.providers[pool.ProviderName] if !ok { - return fmt.Errorf("unknown provider %s for pool %s", dbInstance.ProviderName, dbInstance.PoolID) + return fmt.Errorf("unknown provider %s for pool %s", pool.ProviderName, pool.ID) } var poolInstances []commonParams.ProviderInstance - poolInstances, ok = poolInstanceCache[dbInstance.PoolID] + poolInstances, ok = poolInstanceCache[pool.ID] if !ok { slog.DebugContext( r.ctx, "updating instances cache for pool", @@ -638,12 +618,12 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er } poolInstances, err = provider.ListInstances(r.ctx, pool.ID, listInstancesParams) if err != nil { - return fmt.Errorf("error fetching instances for pool %s: %w", dbInstance.PoolID, err) + return errors.Wrapf(err, "fetching instances for pool %s", pool.ID) } - poolInstanceCache[dbInstance.PoolID] = poolInstances + poolInstanceCache[pool.ID] = poolInstances } - lockAcquired := locking.TryLock(dbInstance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(dbInstance.Name) if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", @@ -656,7 +636,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er g.Go(func() error { deleteMux := false defer func() { - locking.Unlock(dbInstance.Name, deleteMux) + r.keyMux.Unlock(dbInstance.Name, deleteMux) }() providerInstance, ok := instanceInList(dbInstance.Name, poolInstances) if !ok { @@ -665,14 +645,15 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er slog.InfoContext( r.ctx, "Runner instance is no longer on the provider, removing from github", "runner_name", dbInstance.Name) - if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.ID); err != nil { + resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) + if err != nil { // Removed in the meantime? - if errors.Is(err, runnerErrors.ErrNotFound) { + if resp != nil && resp.StatusCode == http.StatusNotFound { slog.DebugContext( r.ctx, "runner disappeared from github", "runner_name", dbInstance.Name) } else { - return fmt.Errorf("error removing runner from github: %w", err) + return errors.Wrap(err, "removing runner from github") } } // Remove the database entry for the runner. 
@@ -680,7 +661,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er r.ctx, "Removing from database", "runner_name", dbInstance.Name) if err := r.store.DeleteInstance(ctx, dbInstance.PoolID, dbInstance.Name); err != nil { - return fmt.Errorf("error removing runner from database: %w", err) + return errors.Wrap(err, "removing runner from database") } deleteMux = true return nil @@ -706,13 +687,13 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []forgeRunner) er }, } if err := provider.Start(r.ctx, dbInstance.ProviderID, startParams); err != nil { - return fmt.Errorf("error starting instance %s: %w", dbInstance.ProviderID, err) + return errors.Wrapf(err, "starting instance %s", dbInstance.ProviderID) } return nil }) } if err := r.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("error removing orphaned github runners: %w", err) + return errors.Wrap(err, "removing orphaned github runners") } return nil } @@ -742,7 +723,7 @@ func (r *basePoolManager) setInstanceRunnerStatus(runnerName string, status para } instance, err := r.store.UpdateInstance(r.ctx, runnerName, updateParams) if err != nil { - return params.Instance{}, fmt.Errorf("error updating runner state: %w", err) + return params.Instance{}, errors.Wrap(err, "updating runner state") } return instance, nil } @@ -755,7 +736,7 @@ func (r *basePoolManager) setInstanceStatus(runnerName string, status commonPara instance, err := r.store.UpdateInstance(r.ctx, runnerName, updateParams) if err != nil { - return params.Instance{}, fmt.Errorf("error updating runner state: %w", err) + return params.Instance{}, errors.Wrap(err, "updating runner state") } return instance, nil } @@ -763,7 +744,7 @@ func (r *basePoolManager) setInstanceStatus(runnerName string, status commonPara func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditionalLabels []string) (err error) { pool, err := r.store.GetEntityPool(r.ctx, r.entity, poolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } provider, ok := r.providers[pool.ProviderName] @@ -777,10 +758,12 @@ func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditiona jitConfig := make(map[string]string) var runner *github.Runner - if !provider.DisableJITConfig() && r.entity.Credentials.ForgeType != params.GiteaEndpointType { + if !provider.DisableJITConfig() { + // Attempt to create JIT config jitConfig, runner, err = r.ghcli.GetEntityJITConfig(ctx, name, pool, labels) if err != nil { - return fmt.Errorf("failed to generate JIT config: %w", err) + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get JIT config, falling back to registration token") } } @@ -804,7 +787,7 @@ func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditiona instance, err := r.store.CreateInstance(r.ctx, poolID, createParams) if err != nil { - return fmt.Errorf("error creating instance: %w", err) + return errors.Wrap(err, "creating instance") } defer func() { @@ -818,7 +801,7 @@ func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditiona } if runner != nil { - runnerCleanupErr := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) + _, runnerCleanupErr := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) if err != nil { slog.With(slog.Any("error", runnerCleanupErr)).ErrorContext( ctx, "failed to remove runner", @@ -852,7 +835,7 @@ func (r *basePoolManager) waitForTimeoutOrCancelled(timeout time.Duration) { } } 
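The AddRunner hunk above makes JIT config best-effort: a failure is logged and the runner falls back to a classic registration token instead of aborting the create. A compressed sketch of that decision flow (getJITConfig and getRegistrationToken are hypothetical stand-ins for the real GitHub calls):

package main

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
)

// Stubs standing in for the GitHub API calls; both are hypothetical.
func getJITConfig(_ context.Context) (map[string]string, error) {
	return nil, errors.New("JIT not supported on this endpoint")
}

func getRegistrationToken(_ context.Context) (string, error) {
	return "AAAA-registration-token", nil
}

// runnerCredentials mirrors the fallback shape: try JIT config first, and
// on error log it and fall back to a registration token rather than fail.
func runnerCredentials(ctx context.Context, disableJIT bool) (map[string]string, string, error) {
	if !disableJIT {
		jit, err := getJITConfig(ctx)
		if err == nil {
			return jit, "", nil // JIT worked; no token needed
		}
		slog.ErrorContext(ctx, "failed to get JIT config, falling back to registration token", "error", err)
	}
	token, err := getRegistrationToken(ctx)
	if err != nil {
		return nil, "", fmt.Errorf("fetching registration token: %w", err)
	}
	return nil, token, nil
}

func main() {
	_, token, err := runnerCredentials(context.Background(), false)
	fmt.Println(token, err) // falls back to the token in this sketch
}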
-func (r *basePoolManager) SetPoolRunningState(isRunning bool, failureReason string) { +func (r *basePoolManager) setPoolRunningState(isRunning bool, failureReason string) { r.mux.Lock() r.managerErrorReason = failureReason r.managerIsRunning = isRunning @@ -872,7 +855,7 @@ func (r *basePoolManager) getLabelsForInstance(pool params.Pool) []string { func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error { pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } provider, ok := r.providers[pool.ProviderName] @@ -882,9 +865,10 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error jwtValidity := pool.RunnerTimeout() - jwtToken, err := r.instanceTokenGetter.NewInstanceJWTToken(instance, r.entity, pool.PoolType(), jwtValidity) + entity := r.entity.String() + jwtToken, err := r.instanceTokenGetter.NewInstanceJWTToken(instance, entity, pool.PoolType(), jwtValidity) if err != nil { - return fmt.Errorf("error fetching instance jwt token: %w", err) + return errors.Wrap(err, "fetching instance jwt token") } hasJITConfig := len(instance.JitConfiguration) > 0 @@ -892,7 +876,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error bootstrapArgs := commonParams.BootstrapInstance{ Name: instance.Name, Tools: r.tools, - RepoURL: r.entity.ForgeURL(), + RepoURL: r.GithubURL(), MetadataURL: instance.MetadataURL, CallbackURL: instance.CallbackURL, InstanceToken: jwtToken, @@ -941,7 +925,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error providerInstance, err := provider.CreateInstance(r.ctx, bootstrapArgs, createInstanceParams) if err != nil { instanceIDToDelete = instance.Name - return fmt.Errorf("error creating instance: %w", err) + return errors.Wrap(err, "creating instance") } if providerInstance.Status == commonParams.InstanceError { @@ -953,7 +937,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error updateInstanceArgs := r.updateArgsFromProviderInstance(providerInstance) if _, err := r.store.UpdateInstance(r.ctx, instance.Name, updateInstanceArgs); err != nil { - return fmt.Errorf("error updating instance: %w", err) + return errors.Wrap(err, "updating instance") } return nil } @@ -974,11 +958,11 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) (params.Job, error) { asUUID, err := uuid.Parse(r.ID()) if err != nil { - return params.Job{}, fmt.Errorf("error parsing pool ID as UUID: %w", err) + return params.Job{}, errors.Wrap(err, "parsing pool ID as UUID") } jobParams := params.Job{ - WorkflowJobID: job.WorkflowJob.ID, + ID: job.WorkflowJob.ID, Action: job.Action, RunID: job.WorkflowJob.RunID, Status: job.WorkflowJob.Status, @@ -996,25 +980,25 @@ func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) ( } switch r.entity.EntityType { - case params.ForgeEntityTypeEnterprise: + case params.GithubEntityTypeEnterprise: jobParams.EnterpriseID = &asUUID - case params.ForgeEntityTypeRepository: + case params.GithubEntityTypeRepository: jobParams.RepoID = &asUUID - case params.ForgeEntityTypeOrganization: + case params.GithubEntityTypeOrganization: jobParams.OrgID = &asUUID default: - return jobParams, fmt.Errorf("unknown pool type: %s", r.entity.EntityType) + return jobParams, errors.Errorf("unknown pool type: 
%s", r.entity.EntityType) } return jobParams, nil } func (r *basePoolManager) poolLabel(poolID string) string { - return fmt.Sprintf("%s=%s", poolIDLabelprefix, poolID) + return fmt.Sprintf("%s%s", poolIDLabelprefix, poolID) } func (r *basePoolManager) controllerLabel() string { - return fmt.Sprintf("%s=%s", controllerLabelPrefix, r.controllerInfo.ControllerID.String()) + return fmt.Sprintf("%s%s", controllerLabelPrefix, r.controllerInfo.ControllerID.String()) } func (r *basePoolManager) updateArgsFromProviderInstance(providerInstance commonParams.ProviderInstance) params.UpdateInstanceParams { @@ -1059,7 +1043,7 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool return nil } - surplus := float64(len(idleWorkers) - pool.MinIdleRunnersAsInt()) + surplus := float64(len(idleWorkers) - int(pool.MinIdleRunners)) if surplus <= 0 { return nil @@ -1077,14 +1061,14 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool for _, instanceToDelete := range idleWorkers[:numScaleDown] { instanceToDelete := instanceToDelete - lockAcquired := locking.TryLock(instanceToDelete.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instanceToDelete.Name) if !lockAcquired { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to acquire lock for instance", "provider_id", instanceToDelete.Name) continue } - defer locking.Unlock(instanceToDelete.Name, false) + defer r.keyMux.Unlock(instanceToDelete.Name, false) g.Go(func() error { slog.InfoContext( @@ -1109,15 +1093,15 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool // instead of returning a bunch of results and filtering manually. queued, err := r.store.ListEntityJobsByStatus(r.ctx, r.entity.EntityType, r.entity.ID, params.JobStatusQueued) if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("error listing queued jobs: %w", err) + return errors.Wrap(err, "listing queued jobs") } for _, job := range queued { if time.Since(job.CreatedAt).Minutes() > 10 && pool.HasRequiredLabels(job.Labels) { - if err := r.store.DeleteJob(ctx, job.WorkflowJobID); err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { + if err := r.store.DeleteJob(ctx, job.ID); err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to delete job", - "job_id", job.WorkflowJobID) + "job_id", job.ID) } } } @@ -1139,7 +1123,7 @@ func (r *basePoolManager) addRunnerToPool(pool params.Pool, aditionalLabels []st return fmt.Errorf("failed to list pool instances: %w", err) } - if poolInstanceCount >= int64(pool.MaxRunnersAsInt()) { + if poolInstanceCount >= int64(pool.MaxRunners) { return fmt.Errorf("max workers (%d) reached for pool %s", pool.MaxRunners, pool.ID) } @@ -1175,19 +1159,14 @@ func (r *basePoolManager) ensureIdleRunnersForOnePool(pool params.Pool) error { } var required int - if len(idleOrPendingWorkers) < pool.MinIdleRunnersAsInt() { + if len(idleOrPendingWorkers) < int(pool.MinIdleRunners) { // get the needed delta. 
- required = pool.MinIdleRunnersAsInt() - len(idleOrPendingWorkers) + required = int(pool.MinIdleRunners) - len(idleOrPendingWorkers) projectedInstanceCount := len(existingInstances) + required - - var projected uint - if projectedInstanceCount > 0 { - projected = uint(projectedInstanceCount) - } - if projected > pool.MaxRunners { + if uint(projectedInstanceCount) > pool.MaxRunners { // ensure we don't go above max workers - delta := projectedInstanceCount - pool.MaxRunnersAsInt() + delta := projectedInstanceCount - int(pool.MaxRunners) required -= delta } } @@ -1230,7 +1209,7 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po slog.DebugContext( ctx, "attempting to retry failed instance", "runner_name", instance.Name) - lockAcquired := locking.TryLock(instance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instance.Name) if !lockAcquired { slog.DebugContext( ctx, "failed to acquire lock for instance", @@ -1239,7 +1218,7 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po } g.Go(func() error { - defer locking.Unlock(instance.Name, false) + defer r.keyMux.Unlock(instance.Name, false) slog.DebugContext( ctx, "attempting to clean up any previous instance", "runner_name", instance.Name) @@ -1293,7 +1272,10 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po } func (r *basePoolManager) retryFailedInstances() error { - pools := cache.GetEntityPools(r.entity.ID) + pools, err := r.store.ListEntityPools(r.ctx, r.entity) + if err != nil { + return fmt.Errorf("error listing pools: %w", err) + } g, ctx := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1313,7 +1295,10 @@ func (r *basePoolManager) retryFailedInstances() error { } func (r *basePoolManager) scaleDown() error { - pools := cache.GetEntityPools(r.entity.ID) + pools, err := r.store.ListEntityPools(r.ctx, r.entity) + if err != nil { + return fmt.Errorf("error listing pools: %w", err) + } g, ctx := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1331,7 +1316,11 @@ func (r *basePoolManager) scaleDown() error { } func (r *basePoolManager) ensureMinIdleRunners() error { - pools := cache.GetEntityPools(r.entity.ID) + pools, err := r.store.ListEntityPools(r.ctx, r.entity) + if err != nil { + return fmt.Errorf("error listing pools: %w", err) + } + g, _ := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1349,12 +1338,12 @@ func (r *basePoolManager) ensureMinIdleRunners() error { func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instance params.Instance) error { pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } - provider, ok := r.providers[instance.ProviderName] + provider, ok := r.providers[pool.ProviderName] if !ok { - return fmt.Errorf("unknown provider %s for pool %s", instance.ProviderName, instance.PoolID) + return fmt.Errorf("unknown provider %s for pool %s", pool.ProviderName, pool.ID) } identifier := instance.ProviderID @@ -1375,28 +1364,12 @@ func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instan }, } if err := provider.DeleteInstance(ctx, identifier, deleteInstanceParams); err != nil { - return fmt.Errorf("error removing instance: %w", err) + return errors.Wrap(err, "removing instance") } return nil } -func (r *basePoolManager) sleepWithCancel(sleepTime time.Duration) (canceled 
bool) { - if sleepTime == 0 { - return false - } - ticker := time.NewTicker(sleepTime) - defer ticker.Stop() - - select { - case <-ticker.C: - return false - case <-r.quit: - case <-r.ctx.Done(): - } - return true -} - func (r *basePoolManager) deletePendingInstances() error { instances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { @@ -1406,11 +1379,6 @@ func (r *basePoolManager) deletePendingInstances() error { slog.DebugContext( r.ctx, "removing instances in pending_delete") for _, instance := range instances { - if instance.ScaleSetID != 0 { - // instance is part of a scale set. Skip. - continue - } - if instance.Status != commonParams.InstancePendingDelete && instance.Status != commonParams.InstancePendingForceDelete { // not in pending_delete status. Skip. continue @@ -1420,7 +1388,7 @@ func (r *basePoolManager) deletePendingInstances() error { r.ctx, "removing instance from pool", "runner_name", instance.Name, "pool_id", instance.PoolID) - lockAcquired := locking.TryLock(instance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instance.Name) if !lockAcquired { slog.InfoContext( r.ctx, "failed to acquire lock for instance", @@ -1433,7 +1401,7 @@ func (r *basePoolManager) deletePendingInstances() error { slog.DebugContext( r.ctx, "backoff in effect for instance", "runner_name", instance.Name, "deadline", deadline) - locking.Unlock(instance.Name, false) + r.keyMux.Unlock(instance.Name, false) continue } @@ -1445,14 +1413,12 @@ func (r *basePoolManager) deletePendingInstances() error { return fmt.Errorf("failed to generate random number: %w", err) } jitter := time.Duration(num.Int64()) * time.Millisecond - if canceled := r.sleepWithCancel(jitter); canceled { - return nil - } + time.Sleep(jitter) currentStatus := instance.Status deleteMux := false defer func() { - locking.Unlock(instance.Name, deleteMux) + r.keyMux.Unlock(instance.Name, deleteMux) if deleteMux { // deleteMux is set only when the instance was successfully removed. // We can use it as a marker to signal that the backoff is no longer @@ -1520,11 +1486,6 @@ func (r *basePoolManager) addPendingInstances() error { return fmt.Errorf("failed to fetch instances from store: %w", err) } for _, instance := range instances { - if instance.ScaleSetID != 0 { - // instance is part of a scale set. Skip. - continue - } - if instance.Status != commonParams.InstancePendingCreate { // not in pending_create status. Skip. continue @@ -1534,7 +1495,7 @@ func (r *basePoolManager) addPendingInstances() error { r.ctx, "attempting to acquire lock for instance", "runner_name", instance.Name, "action", "create_pending") - lockAcquired := locking.TryLock(instance.Name, r.consumerID) + lockAcquired := r.keyMux.TryLock(instance.Name) if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", @@ -1548,14 +1509,14 @@ func (r *basePoolManager) addPendingInstances() error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", instance.Name) - locking.Unlock(instance.Name, false) + r.keyMux.Unlock(instance.Name, false) // We failed to transition the instance to Creating. This means that garm will retry to create this instance // when the loop runs again and we end up with multiple instances. 
continue } go func(instance params.Instance) { - defer locking.Unlock(instance.Name, false) + defer r.keyMux.Unlock(instance.Name, false) slog.InfoContext( r.ctx, "creating instance in pool", "runner_name", instance.Name, @@ -1591,7 +1552,7 @@ func (r *basePoolManager) Wait() error { select { case <-done: case <-timer.C: - return runnerErrors.NewTimeoutError("waiting for pool to stop") + return errors.Wrap(runnerErrors.ErrTimeout, "waiting for pool to stop") } return nil } @@ -1615,13 +1576,13 @@ func (r *basePoolManager) runnerCleanup() (err error) { return nil } -func (r *basePoolManager) cleanupOrphanedRunners(runners []forgeRunner) error { +func (r *basePoolManager) cleanupOrphanedRunners(runners []*github.Runner) error { if err := r.cleanupOrphanedProviderRunners(runners); err != nil { - return fmt.Errorf("error cleaning orphaned instances: %w", err) + return errors.Wrap(err, "cleaning orphaned instances") } if err := r.cleanupOrphanedGithubRunners(runners); err != nil { - return fmt.Errorf("error cleaning orphaned github runners: %w", err) + return errors.Wrap(err, "cleaning orphaned github runners") } return nil @@ -1631,16 +1592,6 @@ func (r *basePoolManager) Start() error { initialToolUpdate := make(chan struct{}, 1) go func() { slog.Info("running initial tool update") - for { - slog.DebugContext(r.ctx, "waiting for tools to be available") - hasTools, stopped := r.waitForToolsOrCancel() - if stopped { - return - } - if hasTools { - break - } - } if err := r.updateTools(); err != nil { slog.With(slog.Any("error", err)).Error("failed to update tools") } @@ -1689,22 +1640,45 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa if !r.managerIsRunning && !bypassGHUnauthorizedError { return runnerErrors.NewConflictError("pool manager is not running for %s", r.entity.String()) } - if runner.AgentID != 0 { - if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.AgentID); err != nil { - if errors.Is(err, runnerErrors.ErrUnauthorized) { - slog.With(slog.Any("error", err)).ErrorContext(r.ctx, "failed to remove runner from github") - // Mark the pool as offline from this point forward - r.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) - slog.With(slog.Any("error", err)).ErrorContext( - r.ctx, "failed to remove runner") - if bypassGHUnauthorizedError { - slog.Info("bypass github unauthorized error is set, marking runner for deletion") - } else { - return fmt.Errorf("error removing runner: %w", err) + resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.AgentID) + if err != nil { + if resp != nil { + switch resp.StatusCode { + case http.StatusUnprocessableEntity: + return errors.Wrapf(runnerErrors.ErrBadRequest, "removing runner: %q", err) + case http.StatusNotFound: + // Runner may have been deleted by a finished job, or manually by the user. + slog.DebugContext( + r.ctx, "runner was not found in github", + "agent_id", runner.AgentID) + case http.StatusUnauthorized: + slog.With(slog.Any("error", err)).ErrorContext(r.ctx, "failed to remove runner from github") + // Mark the pool as offline from this point forward + r.setPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) + slog.With(slog.Any("error", err)).ErrorContext( + r.ctx, "failed to remove runner") + if bypassGHUnauthorizedError { + slog.Info("bypass github unauthorized error is set, marking runner for deletion") + break + } + // evaluate the next switch case. 
+ fallthrough + default: + return errors.Wrap(err, "removing runner") + } } else { - return fmt.Errorf("error removing runner: %w", err) + errResp := &github.ErrorResponse{} + if errors.As(err, &errResp) { + if errResp.Response != nil && errResp.Response.StatusCode == http.StatusUnauthorized && bypassGHUnauthorizedError { + slog.Info("bypass github unauthorized error is set, marking runner for deletion") + } else { + return errors.Wrap(err, "removing runner") + } + } else { + // We got a nil response. Assume we are in error. + return errors.Wrap(err, "removing runner") + } } } } @@ -1722,7 +1696,7 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner", "runner_name", runner.Name) - return fmt.Errorf("error updating runner: %w", err) + return errors.Wrap(err, "updating runner") } return nil } @@ -1753,7 +1727,7 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa func (r *basePoolManager) consumeQueuedJobs() error { queued, err := r.store.ListEntityJobsByStatus(r.ctx, r.entity.EntityType, r.entity.ID, params.JobStatusQueued) if err != nil { - return fmt.Errorf("error listing queued jobs: %w", err) + return errors.Wrap(err, "listing queued jobs") } poolsCache := poolsForTags{ @@ -1768,16 +1742,16 @@ func (r *basePoolManager) consumeQueuedJobs() error { // Job was handled by us or another entity. slog.DebugContext( r.ctx, "job is locked", - "job_id", job.WorkflowJobID, + "job_id", job.ID, "locking_entity", job.LockedBy.String()) continue } - if time.Since(job.UpdatedAt) < time.Second*r.controllerInfo.JobBackoff() { + if time.Since(job.UpdatedAt) < time.Second*time.Duration(r.controllerInfo.MinimumJobAgeBackoff) { // give the idle runners a chance to pick up the job. slog.DebugContext( r.ctx, "job backoff not reached", "backoff_interval", r.controllerInfo.MinimumJobAgeBackoff, - "job_id", job.WorkflowJobID) + "job_id", job.ID) continue } @@ -1785,12 +1759,12 @@ func (r *basePoolManager) consumeQueuedJobs() error { // Job is still queued in our db, 10 minutes after a matching runner // was spawned. Unlock it and try again. A different job may have picked up // the runner. - if err := r.store.UnlockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { + if err := r.store.UnlockJob(r.ctx, job.ID, r.ID()); err != nil { // nolint:golangci-lint,godox // TODO: Implement a cache? Should we return here? slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to unlock job", - "job_id", job.WorkflowJobID) + "job_id", job.ID) continue } } @@ -1803,7 +1777,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { // runner.
slog.DebugContext( r.ctx, "job is locked by us", - "job_id", job.WorkflowJobID) + "job_id", job.ID) continue } @@ -1824,29 +1798,29 @@ func (r *basePoolManager) consumeQueuedJobs() error { } runnerCreated := false - if err := r.store.LockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { + if err := r.store.LockJob(r.ctx, job.ID, r.ID()); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not lock job", - "job_id", job.WorkflowJobID) + "job_id", job.ID) continue } jobLabels := []string{ - fmt.Sprintf("%s=%d", jobLabelPrefix, job.WorkflowJobID), + fmt.Sprintf("%s%d", jobLabelPrefix, job.ID), } for i := 0; i < poolRR.Len(); i++ { pool, err := poolRR.Next() if err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not find a pool to create a runner for job", - "job_id", job.WorkflowJobID) + "job_id", job.ID) break } slog.InfoContext( r.ctx, "attempting to create a runner in pool", "pool_id", pool.ID, - "job_id", job.WorkflowJobID) + "job_id", job.ID) if err := r.addRunnerToPool(pool, jobLabels); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not add runner to pool", @@ -1855,7 +1829,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { } slog.DebugContext(r.ctx, "a new runner was added as a response to queued job", "pool_id", pool.ID, - "job_id", job.WorkflowJobID) + "job_id", job.ID) runnerCreated = true break } @@ -1863,12 +1837,12 @@ func (r *basePoolManager) consumeQueuedJobs() error { if !runnerCreated { slog.WarnContext( r.ctx, "could not create a runner for job; unlocking", - "job_id", job.WorkflowJobID) - if err := r.store.UnlockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { + "job_id", job.ID) + if err := r.store.UnlockJob(r.ctx, job.ID, r.ID()); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to unlock job", - "job_id", job.WorkflowJobID) - return fmt.Errorf("error unlocking job: %w", err) + "job_id", job.ID) + return errors.Wrap(err, "unlocking job") } } } @@ -1882,12 +1856,12 @@ func (r *basePoolManager) consumeQueuedJobs() error { func (r *basePoolManager) UninstallWebhook(ctx context.Context) error { if r.controllerInfo.ControllerWebhookURL == "" { - return runnerErrors.NewBadRequestError("controller webhook url is empty") + return errors.Wrap(runnerErrors.ErrBadRequest, "controller webhook url is empty") } allHooks, err := r.listHooks(ctx) if err != nil { - return fmt.Errorf("error listing hooks: %w", err) + return errors.Wrap(err, "listing hooks") } var controllerHookID int64 @@ -1925,16 +1899,16 @@ func (r *basePoolManager) UninstallWebhook(ctx context.Context) error { func (r *basePoolManager) InstallHook(ctx context.Context, req *github.Hook) (params.HookInfo, error) { allHooks, err := r.listHooks(ctx) if err != nil { - return params.HookInfo{}, fmt.Errorf("error listing hooks: %w", err) + return params.HookInfo{}, errors.Wrap(err, "listing hooks") } if err := validateHookRequest(r.controllerInfo.ControllerID.String(), r.controllerInfo.WebhookURL, allHooks, req); err != nil { - return params.HookInfo{}, fmt.Errorf("error validating hook request: %w", err) + return params.HookInfo{}, errors.Wrap(err, "validating hook request") } hook, err := r.ghcli.CreateEntityHook(ctx, req) if err != nil { - return params.HookInfo{}, fmt.Errorf("error creating entity hook: %w", err) + return params.HookInfo{}, errors.Wrap(err, "creating entity hook") } if _, err := r.ghcli.PingEntityHook(ctx, hook.GetID()); err != nil { @@ -1949,7 +1923,7 @@ func (r *basePoolManager) 
InstallHook(ctx context.Context, req *github.Hook) (pa func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) { if r.controllerInfo.ControllerWebhookURL == "" { - return params.HookInfo{}, runnerErrors.NewBadRequestError("controller webhook url is empty") + return params.HookInfo{}, errors.Wrap(runnerErrors.ErrBadRequest, "controller webhook url is empty") } insecureSSL := "0" @@ -1959,9 +1933,9 @@ func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.Insta req := &github.Hook{ Active: github.Ptr(true), Config: &github.HookConfig{ + URL: github.Ptr(r.controllerInfo.ControllerWebhookURL), ContentType: github.Ptr("json"), InsecureSSL: github.Ptr(insecureSSL), - URL: github.Ptr(r.controllerInfo.ControllerWebhookURL), Secret: github.Ptr(r.WebhookSecret()), }, Events: []string{ @@ -1974,15 +1948,15 @@ func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.Insta func (r *basePoolManager) ValidateOwner(job params.WorkflowJob) error { switch r.entity.EntityType { - case params.ForgeEntityTypeRepository: + case params.GithubEntityTypeRepository: if !strings.EqualFold(job.Repository.Name, r.entity.Name) || !strings.EqualFold(job.Repository.Owner.Login, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } - case params.ForgeEntityTypeOrganization: - if !strings.EqualFold(job.GetOrgName(r.entity.Credentials.ForgeType), r.entity.Owner) { + case params.GithubEntityTypeOrganization: + if !strings.EqualFold(job.Organization.Login, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } - case params.ForgeEntityTypeEnterprise: + case params.GithubEntityTypeEnterprise: if !strings.EqualFold(job.Enterprise.Slug, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } @@ -1997,9 +1971,9 @@ func (r *basePoolManager) GithubRunnerRegistrationToken() (string, error) { tk, ghResp, err := r.ghcli.CreateEntityRegistrationToken(r.ctx) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return "", runnerErrors.NewUnauthorizedError("error fetching token") + return "", errors.Wrap(runnerErrors.ErrUnauthorized, "fetching token") } - return "", fmt.Errorf("error creating runner token: %w", err) + return "", errors.Wrap(err, "creating runner token") } return *tk.Token, nil } @@ -2008,9 +1982,9 @@ func (r *basePoolManager) FetchTools() ([]commonParams.RunnerApplicationDownload tools, ghResp, err := r.ghcli.ListEntityRunnerApplicationDownloads(r.ctx) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, runnerErrors.NewUnauthorizedError("error fetching tools") + return nil, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching tools") } - return nil, fmt.Errorf("error fetching runner tools: %w", err) + return nil, errors.Wrap(err, "fetching runner tools") } ret := []commonParams.RunnerApplicationDownload{} @@ -2023,10 +1997,48 @@ func (r *basePoolManager) FetchTools() ([]commonParams.RunnerApplicationDownload return ret, nil } +func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { + opts := github.ListRunnersOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, + } + var allRunners []*github.Runner + + for { + runners, ghResp, err := r.ghcli.ListEntityRunners(r.ctx, &opts) + if err != nil { + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return nil, 
errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + } + return nil, errors.Wrap(err, "fetching runners") + } + allRunners = append(allRunners, runners.Runners...) + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + + return allRunners, nil +} + +func (r *basePoolManager) GithubURL() string { + switch r.entity.EntityType { + case params.GithubEntityTypeRepository: + return fmt.Sprintf("%s/%s/%s", r.entity.Credentials.BaseURL, r.entity.Owner, r.entity.Name) + case params.GithubEntityTypeOrganization: + return fmt.Sprintf("%s/%s", r.entity.Credentials.BaseURL, r.entity.Owner) + case params.GithubEntityTypeEnterprise: + return fmt.Sprintf("%s/enterprises/%s", r.entity.Credentials.BaseURL, r.entity.Owner) + } + return "" +} + func (r *basePoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, error) { allHooks, err := r.listHooks(ctx) if err != nil { - return params.HookInfo{}, fmt.Errorf("error listing hooks: %w", err) + return params.HookInfo{}, errors.Wrap(err, "listing hooks") } trimmedBase := strings.TrimRight(r.controllerInfo.WebhookURL, "/") trimmedController := strings.TrimRight(r.controllerInfo.ControllerWebhookURL, "/") diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index 0afd6a52..eef4afde 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -1,22 +1,7 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
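[Editor's aside] The GetGithubRunners helper added above follows the standard go-github pagination idiom: request 100 items per page and follow Response.NextPage until it reaches zero. The loop shape generalizes to any paginated list call; a sketch under that assumption (the generic paginate helper is ours, not GARM's):

package pool

import (
	"context"

	"github.com/google/go-github/v72/github"
)

// paginate drains a paginated go-github list call. The lister callback wraps
// whichever API method is being paginated and must honor opts.Page.
func paginate[T any](ctx context.Context, lister func(context.Context, *github.ListOptions) ([]T, *github.Response, error)) ([]T, error) {
	opts := &github.ListOptions{PerPage: 100}
	var all []T
	for {
		items, resp, err := lister(ctx, opts)
		if err != nil {
			return nil, err
		}
		all = append(all, items...)
		// NextPage is zero on the last page; stop there.
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
	return all, nil
}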
- package pool import ( "context" - "net/url" "github.com/google/go-github/v72/github" @@ -55,8 +40,8 @@ func (s *stubGithubClient) ListEntityRunnerApplicationDownloads(_ context.Contex return nil, nil, s.err } -func (s *stubGithubClient) RemoveEntityRunner(_ context.Context, _ int64) error { - return s.err +func (s *stubGithubClient) RemoveEntityRunner(_ context.Context, _ int64) (*github.Response, error) { + return nil, s.err } func (s *stubGithubClient) CreateEntityRegistrationToken(_ context.Context) (*github.RegistrationToken, *github.Response, error) { @@ -70,19 +55,3 @@ func (s *stubGithubClient) GetEntityJITConfig(_ context.Context, _ string, _ par func (s *stubGithubClient) GetWorkflowJobByID(_ context.Context, _, _ string, _ int64) (*github.WorkflowJob, *github.Response, error) { return nil, nil, s.err } - -func (s *stubGithubClient) GetEntity() params.ForgeEntity { - return params.ForgeEntity{} -} - -func (s *stubGithubClient) GithubBaseURL() *url.URL { - return nil -} - -func (s *stubGithubClient) RateLimit(_ context.Context) (*github.RateLimits, error) { - return nil, s.err -} - -func (s *stubGithubClient) GetEntityRunnerGroupIDByName(_ context.Context, _ string) (int64, error) { - return 0, s.err -} diff --git a/runner/pool/util.go b/runner/pool/util.go index d58f90a3..f78d0e32 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -1,38 +1,84 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package pool import ( - "context" - "fmt" - "log/slog" - "net/http" - "net/url" + "sort" "strings" - "time" + "sync" + "sync/atomic" "github.com/google/go-github/v72/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" ) +type poolCacheStore interface { + Next() (params.Pool, error) + Reset() + Len() int +} + +type poolRoundRobin struct { + pools []params.Pool + next uint32 +} + +func (p *poolRoundRobin) Next() (params.Pool, error) { + if len(p.pools) == 0 { + return params.Pool{}, runnerErrors.ErrNoPoolsAvailable + } + + n := atomic.AddUint32(&p.next, 1) + return p.pools[(int(n)-1)%len(p.pools)], nil +} + +func (p *poolRoundRobin) Len() int { + return len(p.pools) +} + +func (p *poolRoundRobin) Reset() { + atomic.StoreUint32(&p.next, 0) +} + +type poolsForTags struct { + pools sync.Map + poolCacheType params.PoolBalancerType +} + +func (p *poolsForTags) Get(tags []string) (poolCacheStore, bool) { + sort.Strings(tags) + key := strings.Join(tags, "^") + + v, ok := p.pools.Load(key) + if !ok { + return nil, false + } + poolCache := v.(*poolRoundRobin) + if p.poolCacheType == params.PoolBalancerTypePack { + // When we service a list of jobs, we want to try each pool in turn + // for each job. 
Pools are sorted by priority so we always start from the + // highest priority pool and move on to the next if the first one is full. + poolCache.Reset() + } + return poolCache, true +} + +func (p *poolsForTags) Add(tags []string, pools []params.Pool) poolCacheStore { + sort.Slice(pools, func(i, j int) bool { + return pools[i].Priority > pools[j].Priority + }) + + sort.Strings(tags) + key := strings.Join(tags, "^") + + poolRR := &poolRoundRobin{pools: pools} + v, _ := p.pools.LoadOrStore(key, poolRR) + return v.(*poolRoundRobin) +} + func instanceInList(instanceName string, instances []commonParams.ProviderInstance) (commonParams.ProviderInstance, bool) { for _, val := range instances { if val.Name == instanceName { @@ -45,21 +91,23 @@ func instanceInList(instanceName string, instances []commonParams.ProviderInstan func controllerIDFromLabels(labels []string) string { for _, lbl := range labels { if strings.HasPrefix(lbl, controllerLabelPrefix) { - trimLength := min(len(controllerLabelPrefix)+1, len(lbl)) - return lbl[trimLength:] + return lbl[len(controllerLabelPrefix):] } } return "" } -func labelsFromRunner(runner forgeRunner) []string { - if runner.Labels == nil { +func labelsFromRunner(runner *github.Runner) []string { + if runner == nil || runner.Labels == nil { return []string{} } var labels []string for _, val := range runner.Labels { - labels = append(labels, val.Name) + if val == nil { + continue + } + labels = append(labels, val.GetName()) } return labels } @@ -71,7 +119,7 @@ func isManagedRunner(labels []string, controllerID string) bool { return runnerControllerID == controllerID } -func composeWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { +func composeWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { // We want to watch for changes in either the controller or the // entity itself. return watcher.WithAny( @@ -83,191 +131,6 @@ func composeWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc // Any operation on the entity we're managing the pool for. 
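[Editor's aside] Stepping back to the poolsForTags/poolRoundRobin pair above: entries are keyed by the sorted, caret-joined tag set, Add orders pools by descending priority before storing them, and Next hands out pools in wrap-around order via the atomic counter. A short usage sketch, assuming it sits in the same package as poolsForTags (the pool values are made up):

package pool

import (
	"fmt"

	"github.com/cloudbase/garm/params"
)

func examplePoolRoundRobin() {
	cache := &poolsForTags{poolCacheType: params.PoolBalancerTypeRoundRobin}
	// Add sorts by priority (descending), so "high" is tried first.
	rr := cache.Add([]string{"linux", "self-hosted"}, []params.Pool{
		{ID: "low", Priority: 1},
		{ID: "high", Priority: 10},
	})
	for i := 0; i < rr.Len(); i++ {
		pool, err := rr.Next() // yields "high", then "low", wrapping around
		if err != nil {
			break // ErrNoPoolsAvailable when no pools were registered
		}
		fmt.Println(pool.ID)
	}
}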
watcher.WithEntityFilter(entity), // Watch for changes to the github credentials - watcher.WithForgeCredentialsFilter(entity.Credentials), + watcher.WithGithubCredentialsFilter(entity.Credentials), ) } - -func (r *basePoolManager) waitForToolsOrCancel() (hasTools, stopped bool) { - ticker := time.NewTicker(1 * time.Second) - defer ticker.Stop() - select { - case <-ticker.C: - if _, err := cache.GetGithubToolsCache(r.entity.ID); err != nil { - return false, false - } - return true, false - case <-r.quit: - return false, true - case <-r.ctx.Done(): - return false, true - } -} - -func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, req *github.Hook) error { - parsed, err := url.Parse(baseURL) - if err != nil { - return fmt.Errorf("error parsing webhook url: %w", err) - } - - partialMatches := []string{} - for _, hook := range allHooks { - hookURL := strings.ToLower(hook.Config.GetURL()) - if hookURL == "" { - continue - } - - if hook.Config.GetURL() == req.Config.GetURL() { - return runnerErrors.NewConflictError("hook already installed") - } else if strings.Contains(hookURL, controllerID) || strings.Contains(hookURL, parsed.Hostname()) { - partialMatches = append(partialMatches, hook.Config.GetURL()) - } - } - - if len(partialMatches) > 0 { - return runnerErrors.NewConflictError("a webhook containing the controller ID or hostname of this controller is already installed on this repository") - } - - return nil -} - -func hookToParamsHookInfo(hook *github.Hook) params.HookInfo { - hookURL := hook.Config.GetURL() - - insecureSSLConfig := hook.Config.GetInsecureSSL() - insecureSSL := insecureSSLConfig == "1" - - return params.HookInfo{ - ID: *hook.ID, - URL: hookURL, - Events: hook.Events, - Active: *hook.Active, - InsecureSSL: insecureSSL, - } -} - -func (r *basePoolManager) listHooks(ctx context.Context) ([]*github.Hook, error) { - opts := github.ListOptions{ - PerPage: 100, - } - var allHooks []*github.Hook - for { - hooks, ghResp, err := r.ghcli.ListEntityHooks(ctx, &opts) - if err != nil { - if ghResp != nil && ghResp.StatusCode == http.StatusNotFound { - return nil, runnerErrors.NewBadRequestError("repository not found or your PAT does not have access to manage webhooks") - } - return nil, fmt.Errorf("error fetching hooks: %w", err) - } - allHooks = append(allHooks, hooks...) - if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - return allHooks, nil -} - -func (r *basePoolManager) listRunnersWithPagination() ([]forgeRunner, error) { - opts := github.ListRunnersOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - var allRunners []*github.Runner - - // Paginating like this can lead to a situation where if we have many pages of runners, - // while we paginate, a particular runner can move from page n to page n-1 while we move - // from page n-1 to page n. In situations such as that, we end up with a list of runners - // that does not contain the runner that swapped pages while we were paginating. - // Sadly, the GitHub API does not allow listing more than 100 runners per page. - for { - runners, ghResp, err := r.ghcli.ListEntityRunners(r.ctx, &opts) - if err != nil { - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, runnerErrors.NewUnauthorizedError("error fetching runners") - } - return nil, fmt.Errorf("error fetching runners: %w", err) - } - allRunners = append(allRunners, runners.Runners...)
- if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - - ret := make([]forgeRunner, len(allRunners)) - for idx, val := range allRunners { - ret[idx] = forgeRunner{ - ID: val.GetID(), - Name: val.GetName(), - Status: val.GetStatus(), - Labels: make([]RunnerLabels, len(val.Labels)), - } - for labelIdx, label := range val.Labels { - ret[idx].Labels[labelIdx] = RunnerLabels{ - Name: label.GetName(), - Type: label.GetType(), - ID: label.GetID(), - } - } - } - - return ret, nil -} - -func (r *basePoolManager) listRunnersWithScaleSetAPI() ([]forgeRunner, error) { - if r.scaleSetClient == nil { - return nil, fmt.Errorf("scaleset client not initialized") - } - - runners, err := r.scaleSetClient.ListAllRunners(r.ctx) - if err != nil { - return nil, fmt.Errorf("failed to list runners through scaleset API: %w", err) - } - - ret := []forgeRunner{} - for _, runner := range runners.RunnerReferences { - if runner.RunnerScaleSetID != 0 { - // skip scale set runners. - continue - } - run := forgeRunner{ - Name: runner.Name, - ID: runner.ID, - Status: string(runner.GetStatus()), - Labels: make([]RunnerLabels, len(runner.Labels)), - } - for labelIDX, label := range runner.Labels { - run.Labels[labelIDX] = RunnerLabels{ - Name: label.Name, - Type: label.Type, - } - } - ret = append(ret, run) - } - return ret, nil -} - -func (r *basePoolManager) GetGithubRunners() ([]forgeRunner, error) { - // Gitea has no scale sets API - if r.scaleSetClient == nil { - return r.listRunnersWithPagination() - } - - // try the scale sets API for github - runners, err := r.listRunnersWithScaleSetAPI() - if err != nil { - slog.WarnContext(r.ctx, "failed to list runners via scaleset API; falling back to pagination", "error", err) - return r.listRunnersWithPagination() - } - - entityInstances := cache.GetEntityInstances(r.entity.ID) - if len(entityInstances) > 0 && len(runners) == 0 { - // I have trust issues in the undocumented API. We seem to have runners for this - // entity, but the scaleset API returned nothing and no error. Fall back to pagination. - slog.DebugContext(r.ctx, "the scaleset api returned nothing, but we seem to have runners in the db; falling back to paginated API runner list") - return r.listRunnersWithPagination() - } - slog.DebugContext(r.ctx, "Scaleset API runner list succeeded", "runners", runners) - return runners, nil -} diff --git a/runner/pool/util_test.go b/runner/pool/util_test.go index 67d31f76..bcfea879 100644 --- a/runner/pool/util_test.go +++ b/runner/pool/util_test.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package pool import ( diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 999b52c6..b17494d5 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -1,32 +1,20 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package pool import ( "log/slog" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" runnerCommon "github.com/cloudbase/garm/runner/common" - ghClient "github.com/cloudbase/garm/util/github" + garmUtil "github.com/cloudbase/garm/util" ) // entityGetter is implemented by all github entities (repositories, organizations and enterprises) type entityGetter interface { - GetEntity() (params.ForgeEntity, error) + GetEntity() (params.GithubEntity, error) } func (r *basePoolManager) handleControllerUpdateEvent(controllerInfo params.ControllerInfo) { @@ -40,17 +28,17 @@ func (r *basePoolManager) handleControllerUpdateEvent(controllerInfo params.Cont func (r *basePoolManager) getClientOrStub() runnerCommon.GithubClient { var err error var ghc runnerCommon.GithubClient - ghc, err = ghClient.Client(r.ctx, r.entity) + ghc, err = garmUtil.GithubClient(r.ctx, r.entity, r.entity.Credentials) if err != nil { slog.WarnContext(r.ctx, "failed to create github client", "error", err) ghc = &stubGithubClient{ - err: runnerErrors.NewUnauthorizedError("failed to create github client; please update credentials"), + err: errors.Wrapf(runnerErrors.ErrUnauthorized, "failed to create github client; please update credentials: %v", err), } } return ghc } -func (r *basePoolManager) handleEntityUpdate(entity params.ForgeEntity, operation common.OperationType) { +func (r *basePoolManager) handleEntityUpdate(entity params.GithubEntity, operation common.OperationType) { slog.DebugContext(r.ctx, "received entity operation", "entity", entity.ID, "operation", operation) if r.entity.ID != entity.ID { slog.WarnContext(r.ctx, "entity ID mismatch; stale event? refusing to update", "entity", entity.ID) @@ -68,7 +56,7 @@ func (r *basePoolManager) handleEntityUpdate(entity params.ForgeEntity, operatio return } - credentialsUpdate := r.entity.Credentials.GetID() != entity.Credentials.GetID() + credentialsUpdate := r.entity.Credentials.ID != entity.Credentials.ID defer func() { slog.DebugContext(r.ctx, "deferred tools update", "credentials_update", credentialsUpdate) if !credentialsUpdate { @@ -97,7 +85,7 @@ func (r *basePoolManager) handleEntityUpdate(entity params.ForgeEntity, operatio slog.DebugContext(r.ctx, "lock released", "entity", entity.ID) } -func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCredentials) { +func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCredentials) { // when we switch credentials on an entity (like from one app to another or from an app // to a PAT), we may still get events for the previous credentials as the channel is buffered. // The watcher will watch for changes to the entity itself, which includes events that @@ -109,12 +97,12 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCreden // test-repo. This function would handle situations where "org_pat" is updated. // If "test-repo" is updated with new credentials, that event is handled above in // handleEntityUpdate. 
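[Editor's aside] The shouldUpdateTools guard that follows implements the stale-event protection the comment describes: compare the incoming credential ID against the entity's current credentials before mutating any state, so buffered events for previously-attached credentials become no-ops. Condensed to its essence (applyCredentials is an illustrative name; the real method also schedules a deferred tools update):

// Sketch only; lives alongside basePoolManager in package pool.
func (r *basePoolManager) applyCredentials(creds params.GithubCredentials) {
	// Ignore buffered events that refer to credentials this entity no
	// longer uses; only a matching ID may mutate state.
	if r.entity.Credentials.ID != creds.ID {
		return
	}
	r.mux.Lock()
	defer r.mux.Unlock()
	r.entity.Credentials = creds
	r.ghcli = r.getClientOrStub()
}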
- shouldUpdateTools := r.entity.Credentials.GetID() == credentials.GetID() + shouldUpdateTools := r.entity.Credentials.ID == credentials.ID defer func() { if !shouldUpdateTools { return } - slog.DebugContext(r.ctx, "deferred tools update", "credentials_id", credentials.GetID()) + slog.DebugContext(r.ctx, "deferred tools update", "credentials_id", credentials.ID) if err := r.updateTools(); err != nil { slog.ErrorContext(r.ctx, "failed to update tools", "error", err) } @@ -122,12 +110,12 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCreden r.mux.Lock() if !shouldUpdateTools { - slog.InfoContext(r.ctx, "credential ID mismatch; stale event?", "credentials_id", credentials.GetID()) + slog.InfoContext(r.ctx, "credential ID mismatch; stale event?", "credentials_id", credentials.ID) r.mux.Unlock() return } - slog.DebugContext(r.ctx, "updating credentials", "credentials_id", credentials.GetID()) + slog.DebugContext(r.ctx, "updating credentials", "credentials_id", credentials.ID) r.entity.Credentials = credentials r.ghcli = r.getClientOrStub() r.mux.Unlock() @@ -136,8 +124,8 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCreden func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { dbEntityType := common.DatabaseEntityType(r.entity.EntityType) switch event.EntityType { - case common.GithubCredentialsEntityType, common.GiteaCredentialsEntityType: - credentials, ok := event.Payload.(params.ForgeCredentials) + case common.GithubCredentialsEntityType: + credentials, ok := event.Payload.(params.GithubCredentials) if !ok { slog.ErrorContext(r.ctx, "failed to cast payload to github credentials") return diff --git a/runner/pools.go b/runner/pools.go index ffd3b9c8..f2eb3c25 100644 --- a/runner/pools.go +++ b/runner/pools.go @@ -16,8 +16,8 @@ package runner import ( "context" - "errors" - "fmt" + + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -31,7 +31,7 @@ func (r *Runner) ListAllPools(ctx context.Context) ([]params.Pool, error) { pools, err := r.store.ListAllPools(ctx) if err != nil { - return nil, fmt.Errorf("error fetching pools: %w", err) + return nil, errors.Wrap(err, "fetching pools") } return pools, nil } @@ -43,7 +43,7 @@ func (r *Runner) GetPoolByID(ctx context.Context, poolID string) (params.Pool, e pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } return pool, nil } @@ -56,7 +56,7 @@ func (r *Runner) DeletePoolByID(ctx context.Context, poolID string) error { pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } return nil } @@ -66,7 +66,7 @@ func (r *Runner) DeletePoolByID(ctx context.Context, poolID string) error { } if err := r.store.DeletePoolByID(ctx, poolID); err != nil { - return fmt.Errorf("error deleting pool: %w", err) + return errors.Wrap(err, "deleting pool") } return nil } @@ -78,7 +78,7 @@ func (r *Runner) UpdatePoolByID(ctx context.Context, poolID string, param params pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } maxRunners := pool.MaxRunners @@ -99,14 +99,14 @@ func (r *Runner) UpdatePoolByID(ctx 
context.Context, poolID string, param params return params.Pool{}, runnerErrors.NewBadRequestError("min_idle_runners cannot be larger than max_runners") } - entity, err := pool.GetEntity() + entity, err := pool.GithubEntity() if err != nil { - return params.Pool{}, fmt.Errorf("error getting entity: %w", err) + return params.Pool{}, errors.Wrap(err, "getting entity") } newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, fmt.Errorf("error updating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "updating pool") } return newPool, nil } @@ -118,7 +118,7 @@ func (r *Runner) ListAllJobs(ctx context.Context) ([]params.Job, error) { jobs, err := r.store.ListAllJobs(ctx) if err != nil { - return nil, fmt.Errorf("error fetching jobs: %w", err) + return nil, errors.Wrap(err, "fetching jobs") } return jobs, nil } diff --git a/runner/pools_test.go b/runner/pools_test.go index 2a2aea5d..918598d1 100644 --- a/runner/pools_test.go +++ b/runner/pools_test.go @@ -47,9 +47,9 @@ type PoolTestSuite struct { Runner *Runner adminCtx context.Context - testCreds params.ForgeCredentials - secondaryTestCreds params.ForgeCredentials - githubEndpoint params.ForgeEndpoint + testCreds params.GithubCredentials + secondaryTestCreds params.GithubCredentials + githubEndpoint params.GithubEndpoint } func (s *PoolTestSuite) SetupTest() { @@ -69,15 +69,15 @@ func (s *PoolTestSuite) SetupTest() { s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(s.adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create an organization for testing purposes - org, err := db.CreateOrganization(s.adminCtx, "test-org", s.testCreds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + org, err := db.CreateOrganization(s.adminCtx, "test-org", s.testCreds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } // create some pool objects in the database, for testing purposes - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: org.ID, - EntityType: params.ForgeEntityTypeOrganization, + EntityType: params.GithubEntityTypeOrganization, } orgPools := []params.Pool{} for i := 1; i <= 3; i++ { @@ -169,7 +169,7 @@ func (s *PoolTestSuite) TestGetPoolByIDNotFound() { s.Require().Nil(err) _, err = s.Runner.GetPoolByID(s.Fixtures.AdminContext, s.Fixtures.Pools[0].ID) s.Require().NotNil(err) - s.Require().Equal("error fetching pool: error fetching pool by ID: not found", err.Error()) + s.Require().Equal("fetching pool: fetching pool by ID: not found", err.Error()) } func (s *PoolTestSuite) TestDeletePoolByID() { @@ -178,7 +178,7 @@ func (s *PoolTestSuite) TestDeletePoolByID() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetPoolByID(s.Fixtures.AdminContext, s.Fixtures.Pools[0].ID) s.Require().NotNil(err) - s.Require().Equal("error fetching pool by ID: not found", err.Error()) + s.Require().Equal("fetching pool by ID: not found", err.Error()) } func (s *PoolTestSuite) TestDeletePoolByIDErrUnauthorized() { @@ -220,7 +220,7 @@ func (s *PoolTestSuite) TestTestUpdatePoolByIDInvalidPoolID() { _, err := s.Runner.UpdatePoolByID(s.Fixtures.AdminContext, "dummy-pool-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("error fetching pool: error fetching pool by ID: error parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: fetching pool by ID: parsing id: invalid request", err.Error()) } func (s *PoolTestSuite) 
TestTestUpdatePoolByIDRunnerBootstrapTimeoutFailed() { diff --git a/runner/providers/common/common.go b/runner/providers/common/common.go index f1a5a66d..4e49e080 100644 --- a/runner/providers/common/common.go +++ b/runner/providers/common/common.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package common import ( diff --git a/runner/providers/external/external.go b/runner/providers/external/external.go index 46e3dd47..23b9b894 100644 --- a/runner/providers/external/external.go +++ b/runner/providers/external/external.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package external import ( diff --git a/runner/providers/providers.go b/runner/providers/providers.go index ada11729..165fb585 100644 --- a/runner/providers/providers.go +++ b/runner/providers/providers.go @@ -16,9 +16,10 @@ package providers import ( "context" - "fmt" "log/slog" + "github.com/pkg/errors" + "github.com/cloudbase/garm/config" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" @@ -38,11 +39,11 @@ func LoadProvidersFromConfig(ctx context.Context, cfg config.Config, controllerI conf := providerCfg provider, err := external.NewProvider(ctx, &conf, controllerID) if err != nil { - return nil, fmt.Errorf("error creating provider: %w", err) + return nil, errors.Wrap(err, "creating provider") } providers[providerCfg.Name] = provider default: - return nil, fmt.Errorf("unknown provider type %s", providerCfg.ProviderType) + return nil, errors.Errorf("unknown provider type %s", providerCfg.ProviderType) } } return providers, nil diff --git a/runner/providers/util/util.go b/runner/providers/util/util.go index fb3c12bd..2948730b 100644 --- a/runner/providers/util/util.go +++ b/runner/providers/util/util.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package util import ( diff --git a/runner/providers/v0.1.0/external.go b/runner/providers/v0.1.0/external.go index bb96f4d7..6dd0ef46 100644 --- a/runner/providers/v0.1.0/external.go +++ b/runner/providers/v0.1.0/external.go @@ -1,27 +1,14 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package v010 import ( "context" "encoding/json" - "errors" "fmt" "log/slog" "os/exec" + "github.com/pkg/errors" + garmErrors "github.com/cloudbase/garm-provider-common/errors" commonExecution "github.com/cloudbase/garm-provider-common/execution/common" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -43,7 +30,7 @@ func NewProvider(ctx context.Context, cfg *config.Provider, controllerID string) execPath, err := cfg.External.ExecutablePath() if err != nil { - return nil, fmt.Errorf("error fetching executable path: %w", err) + return nil, errors.Wrap(err, "fetching executable path") } // Set GARM_INTERFACE_VERSION to the version of the interface that the external @@ -82,7 +69,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar asJs, err := json.Marshal(bootstrapParams) if err != nil { - return commonParams.ProviderInstance{}, fmt.Errorf("error serializing bootstrap params: %w", err) + return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing bootstrap params") } metrics.InstanceOperationCount.WithLabelValues( diff --git a/runner/providers/v0.1.1/external.go b/runner/providers/v0.1.1/external.go index 6e43dce7..530a2645 100644 --- a/runner/providers/v0.1.1/external.go +++ b/runner/providers/v0.1.1/external.go @@ -1,28 +1,15 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package v011 import ( "context" "encoding/base64" "encoding/json" - "errors" "fmt" "log/slog" "os/exec" + "github.com/pkg/errors" + garmErrors "github.com/cloudbase/garm-provider-common/errors" commonExecution "github.com/cloudbase/garm-provider-common/execution/common" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -43,7 +30,7 @@ func NewProvider(ctx context.Context, cfg *config.Provider, controllerID string) execPath, err := cfg.External.ExecutablePath() if err != nil { - return nil, fmt.Errorf("error fetching executable path: %w", err) + return nil, errors.Wrap(err, "fetching executable path") } // Set GARM_INTERFACE_VERSION to the version of the interface that the external @@ -74,7 +61,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar extraspecs := bootstrapParams.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) + return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -89,7 +76,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar asJs, err := json.Marshal(bootstrapParams) if err != nil { - return commonParams.ProviderInstance{}, fmt.Errorf("error serializing bootstrap params: %w", err) + return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing bootstrap params") } metrics.InstanceOperationCount.WithLabelValues( @@ -135,7 +122,7 @@ func (e *external) DeleteInstance(ctx context.Context, instance string, deleteIn extraspecs := deleteInstanceParams.DeleteInstanceV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return fmt.Errorf("error serializing extraspecs: %w", err) + return errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -172,7 +159,7 @@ func (e *external) GetInstance(ctx context.Context, instance string, getInstance extraspecs := getInstanceParams.GetInstanceV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) + return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -227,7 +214,7 @@ func (e *external) ListInstances(ctx context.Context, poolID string, listInstanc extraspecs := listInstancesParams.ListInstancesV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return []commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) + return []commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. 
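[Editor's aside] Every extraspecs hunk in this v0.1.1 shim repeats the same two steps: marshal the raw specs to JSON, then base64-encode the bytes so shell-special characters survive the trip to the external executable. Factored into a pair of illustrative helpers (the names are ours; the decode half shows what a provider would do to reverse it):

package v011

import (
	"encoding/base64"
	"encoding/json"

	"github.com/pkg/errors"
)

// encodeExtraSpecs serializes extraspecs and base64-encodes the result so it
// can be passed safely through environment variables or argv.
func encodeExtraSpecs(extraspecs any) (string, error) {
	raw, err := json.Marshal(extraspecs)
	if err != nil {
		return "", errors.Wrap(err, "serializing extraspecs")
	}
	return base64.StdEncoding.EncodeToString(raw), nil
}

// decodeExtraSpecs reverses encodeExtraSpecs on the provider side.
func decodeExtraSpecs(encoded string, out any) error {
	raw, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		return errors.Wrap(err, "decoding extraspecs")
	}
	if err := json.Unmarshal(raw, out); err != nil {
		return errors.Wrap(err, "deserializing extraspecs")
	}
	return nil
}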
base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -282,7 +269,7 @@ func (e *external) RemoveAllInstances(ctx context.Context, removeAllInstances co extraspecs := removeAllInstances.RemoveAllInstancesV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return fmt.Errorf("error serializing extraspecs: %w", err) + return errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -316,7 +303,7 @@ func (e *external) Stop(ctx context.Context, instance string, stopParams common. extraspecs := stopParams.StopV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return fmt.Errorf("error serializing extraspecs: %w", err) + return errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -350,7 +337,7 @@ func (e *external) Start(ctx context.Context, instance string, startParams commo extraspecs := startParams.StartV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return fmt.Errorf("error serializing extraspecs: %w", err) + return errors.Wrap(err, "serializing extraspecs") } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) diff --git a/runner/repositories.go b/runner/repositories.go index 0f21d882..5edff6ff 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -16,11 +16,12 @@ package runner import ( "context" - "errors" "fmt" "log/slog" "strings" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -34,19 +35,10 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa } if err := param.Validate(); err != nil { - return params.Repository{}, fmt.Errorf("error validating params: %w", err) - } - - var creds params.ForgeCredentials - switch param.ForgeType { - case params.GithubEndpointType: - creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) - case params.GiteaEndpointType: - creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) - default: - creds, err = r.ResolveForgeCredentialByName(ctx, param.CredentialsName) + return params.Repository{}, errors.Wrap(err, "validating params") } + creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) if err != nil { return params.Repository{}, runnerErrors.NewBadRequestError("credentials %s not defined", param.CredentialsName) } @@ -54,15 +46,15 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa _, err = r.store.GetRepository(ctx, param.Owner, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) + return params.Repository{}, errors.Wrap(err, "fetching repo") } } else { return params.Repository{}, runnerErrors.NewConflictError("repository %s/%s already exists", param.Owner, param.Name) } - repo, err = r.store.CreateRepository(ctx, param.Owner, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) + repo, err = r.store.CreateRepository(ctx, 
param.Owner, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Repository{}, fmt.Errorf("error creating repository: %w", err) + return params.Repository{}, errors.Wrap(err, "creating repository") } defer func() { @@ -79,7 +71,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa // updating the store. poolMgr, err := r.poolManagerCtrl.CreateRepoPoolManager(r.ctx, repo, r.providers, r.store) if err != nil { - return params.Repository{}, fmt.Errorf("error creating repo pool manager: %w", err) + return params.Repository{}, errors.Wrap(err, "creating repo pool manager") } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteRepoPoolManager(repo); deleteErr != nil { @@ -87,19 +79,19 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa ctx, "failed to cleanup pool manager for repo", "repository_id", repo.ID) } - return params.Repository{}, fmt.Errorf("error starting repo pool manager: %w", err) + return params.Repository{}, errors.Wrap(err, "starting repo pool manager") } return repo, nil } -func (r *Runner) ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { +func (r *Runner) ListRepositories(ctx context.Context) ([]params.Repository, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - repos, err := r.store.ListRepositories(ctx, filter) + repos, err := r.store.ListRepositories(ctx) if err != nil { - return nil, fmt.Errorf("error listing repositories: %w", err) + return nil, errors.Wrap(err, "listing repositories") } var allRepos []params.Repository @@ -125,7 +117,7 @@ func (r *Runner) GetRepositoryByID(ctx context.Context, repoID string) (params.R repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return params.Repository{}, fmt.Errorf("error fetching repository: %w", err) + return params.Repository{}, errors.Wrap(err, "fetching repository") } poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) @@ -144,17 +136,17 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return fmt.Errorf("error fetching repo: %w", err) + return errors.Wrap(err, "fetching repo") } entity, err := repo.GetEntity() if err != nil { - return fmt.Errorf("error getting entity: %w", err) + return errors.Wrap(err, "getting entity") } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return fmt.Errorf("error fetching repo pools: %w", err) + return errors.Wrap(err, "fetching repo pools") } if len(pools) > 0 { @@ -166,19 +158,10 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo return runnerErrors.NewBadRequestError("repo has pools defined (%s)", strings.Join(poolIDs, ", ")) } - scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) - if err != nil { - return fmt.Errorf("error fetching repo scale sets: %w", err) - } - - if len(scaleSets) > 0 { - return runnerErrors.NewBadRequestError("repo has scale sets defined; delete them first") - } - if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return fmt.Errorf("error fetching pool manager: %w", err) + return errors.Wrap(err, "fetching pool manager") } if err := poolMgr.UninstallWebhook(ctx); err != nil { @@ -191,11 +174,11 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, 
keepWebhoo } if err := r.poolManagerCtrl.DeleteRepoPoolManager(repo); err != nil { - return fmt.Errorf("error deleting repo pool manager: %w", err) + return errors.Wrap(err, "deleting repo pool manager") } if err := r.store.DeleteRepository(ctx, repoID); err != nil { - return fmt.Errorf("error removing repository: %w", err) + return errors.Wrap(err, "removing repository") } return nil } @@ -217,12 +200,12 @@ func (r *Runner) UpdateRepository(ctx context.Context, repoID string, param para slog.InfoContext(ctx, "updating repository", "repo_id", repoID, "param", param) repo, err := r.store.UpdateRepository(ctx, repoID, param) if err != nil { - return params.Repository{}, fmt.Errorf("error updating repo: %w", err) + return params.Repository{}, errors.Wrap(err, "updating repo") } poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return params.Repository{}, fmt.Errorf("error getting pool manager: %w", err) + return params.Repository{}, errors.Wrap(err, "getting pool manager") } repo.PoolManagerStatus = poolMgr.Status() @@ -236,21 +219,21 @@ func (r *Runner) CreateRepoPool(ctx context.Context, repoID string, param params createPoolParams, err := r.appendTagsToCreatePoolParams(param) if err != nil { - return params.Pool{}, fmt.Errorf("error appending tags to create pool params: %w", err) + return params.Pool{}, errors.Wrap(err, "appending tags to create pool params") } if createPoolParams.RunnerBootstrapTimeout == 0 { createPoolParams.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) if err != nil { - return params.Pool{}, fmt.Errorf("error creating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "creating pool") } return pool, nil @@ -261,14 +244,14 @@ func (r *Runner) GetRepoPoolByID(ctx context.Context, repoID, poolID string) (pa return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } return pool, nil @@ -279,13 +262,13 @@ func (r *Runner) DeleteRepoPool(ctx context.Context, repoID, poolID string) erro return runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return fmt.Errorf("error fetching pool: %w", err) + return errors.Wrap(err, "fetching pool") } // nolint:golangci-lint,godox @@ -299,7 +282,7 @@ func (r *Runner) DeleteRepoPool(ctx context.Context, repoID, poolID string) erro } if err := r.store.DeleteEntityPool(ctx, entity, poolID); err != nil { - return fmt.Errorf("error deleting pool: %w", err) + return errors.Wrap(err, "deleting pool") } return nil } @@ -308,13 +291,13 @@ func (r *Runner) ListRepoPools(ctx context.Context, repoID string) ([]params.Poo if !auth.IsAdmin(ctx) { return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: 
repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, fmt.Errorf("error fetching pools: %w", err) + return nil, errors.Wrap(err, "fetching pools") } return pools, nil } @@ -326,7 +309,7 @@ func (r *Runner) ListPoolInstances(ctx context.Context, poolID string) ([]params instances, err := r.store.ListPoolInstances(ctx, poolID) if err != nil { - return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) + return []params.Instance{}, errors.Wrap(err, "fetching instances") } return instances, nil } @@ -336,13 +319,13 @@ func (r *Runner) UpdateRepoPool(ctx context.Context, repoID, poolID string, para return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) + return params.Pool{}, errors.Wrap(err, "fetching pool") } maxRunners := pool.MaxRunners @@ -361,7 +344,7 @@ func (r *Runner) UpdateRepoPool(ctx context.Context, repoID, poolID string, para newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, fmt.Errorf("error updating pool: %w", err) + return params.Pool{}, errors.Wrap(err, "updating pool") } return newPool, nil } @@ -370,13 +353,13 @@ func (r *Runner) ListRepoInstances(ctx context.Context, repoID string) ([]params if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - entity := params.ForgeEntity{ + entity := params.GithubEntity{ ID: repoID, - EntityType: params.ForgeEntityTypeRepository, + EntityType: params.GithubEntityTypeRepository, } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return []params.Instance{}, fmt.Errorf("error , errfetching instances: %w", err) + return []params.Instance{}, errors.Wrap(err, "fetching instances") } return instances, nil } @@ -387,12 +370,12 @@ func (r *Runner) findRepoPoolManager(owner, name, endpointName string) (common.P repo, err := r.store.GetRepository(r.ctx, owner, name, endpointName) if err != nil { - return nil, fmt.Errorf("error fetching repo: %w", err) + return nil, errors.Wrap(err, "fetching repo") } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return nil, fmt.Errorf("error fetching pool manager for repo: %w", err) + return nil, errors.Wrap(err, "fetching pool manager for repo") } return poolManager, nil } @@ -404,17 +387,17 @@ func (r *Runner) InstallRepoWebhook(ctx context.Context, repoID string, param pa repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching repo: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching repo") } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return params.HookInfo{}, fmt.Errorf("error fetching pool manager for repo: %w", err) + return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for repo") } info, err := poolManager.InstallWebhook(ctx, param) if err != nil { - return params.HookInfo{}, fmt.Errorf("error installing webhook: %w", err) + return params.HookInfo{}, errors.Wrap(err, "installing webhook") } return info, nil } @@ -426,16 +409,16 @@ func (r *Runner) UninstallRepoWebhook(ctx context.Context, 
repoID string) error
 	repo, err := r.store.GetRepositoryByID(ctx, repoID)
 	if err != nil {
-		return fmt.Errorf("error fetching repo: %w", err)
+		return errors.Wrap(err, "fetching repo")
 	}
 
 	poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo)
 	if err != nil {
-		return fmt.Errorf("error fetching pool manager for repo: %w", err)
+		return errors.Wrap(err, "fetching pool manager for repo")
 	}
 
 	if err := poolManager.UninstallWebhook(ctx); err != nil {
-		return fmt.Errorf("error uninstalling webhook: %w", err)
+		return errors.Wrap(err, "uninstalling webhook")
 	}
 	return nil
 }
@@ -447,17 +430,17 @@ func (r *Runner) GetRepoWebhookInfo(ctx context.Context, repoID string) (params.
 	repo, err := r.store.GetRepositoryByID(ctx, repoID)
 	if err != nil {
-		return params.HookInfo{}, fmt.Errorf("error fetching repo: %w", err)
+		return params.HookInfo{}, errors.Wrap(err, "fetching repo")
 	}
 
 	poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo)
 	if err != nil {
-		return params.HookInfo{}, fmt.Errorf("error fetching pool manager for repo: %w", err)
+		return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for repo")
 	}
 
 	info, err := poolManager.GetWebhookInfo(ctx)
 	if err != nil {
-		return params.HookInfo{}, fmt.Errorf("error getting webhook info: %w", err)
+		return params.HookInfo{}, errors.Wrap(err, "getting webhook info")
 	}
 	return info, nil
 }
diff --git a/runner/repositories_test.go b/runner/repositories_test.go
index 8f195ae3..c1aa04b4 100644
--- a/runner/repositories_test.go
+++ b/runner/repositories_test.go
@@ -16,10 +16,10 @@ package runner
 
 import (
 	"context"
-	"errors"
 	"fmt"
 	"testing"
 
+	"github.com/pkg/errors"
 	"github.com/stretchr/testify/mock"
 	"github.com/stretchr/testify/suite"
 
@@ -39,7 +39,7 @@ type RepoTestFixtures struct {
 	Store                dbCommon.Store
 	StoreRepos           map[string]params.Repository
 	Providers            map[string]common.Provider
-	Credentials          map[string]params.ForgeCredentials
+	Credentials          map[string]params.GithubCredentials
 	CreateRepoParams     params.CreateRepoParams
 	CreatePoolParams     params.CreatePoolParams
 	CreateInstanceParams params.CreateInstanceParams
@@ -60,11 +60,9 @@ type RepoTestSuite struct {
 	Fixtures *RepoTestFixtures
 	Runner   *Runner
 
-	testCreds          params.ForgeCredentials
-	secondaryTestCreds params.ForgeCredentials
-	giteaTestCreds     params.ForgeCredentials
-	githubEndpoint     params.ForgeEndpoint
-	giteaEndpoint      params.ForgeEndpoint
+	testCreds          params.GithubCredentials
+	secondaryTestCreds params.GithubCredentials
+	githubEndpoint     params.GithubEndpoint
 }
 
 func (s *RepoTestSuite) SetupTest() {
@@ -77,10 +75,8 @@ func (s *RepoTestSuite) SetupTest() {
 	adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T())
 
 	s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T())
-	s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T())
 	s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint)
 	s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint)
-	s.giteaTestCreds = garmTesting.CreateTestGiteaCredentials(adminCtx, "gitea-creds", db, s.T(), s.giteaEndpoint)
 
 	// create some repository objects in the database, for testing purposes
 	repos := map[string]params.Repository{}
@@ -90,7 +86,7 @@ func (s *RepoTestSuite) SetupTest() {
 			adminCtx,
 			fmt.Sprintf("test-owner-%v", i),
 			name,
-			s.testCreds,
+			s.testCreds.Name,
 			fmt.Sprintf("test-webhook-secret-%v", i),
 			params.PoolBalancerTypeRoundRobin,
 		)
@@ -111,7 +107,7 @@ func (s *RepoTestSuite) SetupTest() {
 		Providers: map[string]common.Provider{
 			"test-provider": providerMock,
 		},
-		Credentials: map[string]params.ForgeCredentials{
+		Credentials: map[string]params.GithubCredentials{
 			s.testCreds.Name:          s.testCreds,
 			s.secondaryTestCreds.Name: s.secondaryTestCreds,
 		},
@@ -120,7 +116,6 @@ func (s *RepoTestSuite) SetupTest() {
 			Name:            "test-repo-create",
 			CredentialsName: s.testCreds.Name,
 			WebhookSecret:   "test-create-repo-webhook-secret",
-			ForgeType:       params.GithubEndpointType,
 		},
 		CreatePoolParams: params.CreatePoolParams{
 			ProviderName: "test-provider",
@@ -240,7 +235,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolMgrFailed() {
 	s.Fixtures.PoolMgrMock.AssertExpectations(s.T())
 	s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T())
-	s.Require().Equal(fmt.Sprintf("error creating repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
+	s.Require().Equal(fmt.Sprintf("creating repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
 }
 
 func (s *RepoTestSuite) TestCreateRepositoryStartPoolMgrFailed() {
@@ -252,87 +247,20 @@ func (s *RepoTestSuite) TestCreateRepositoryStartPoolMgrFailed() {
 	s.Fixtures.PoolMgrMock.AssertExpectations(s.T())
 	s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T())
-	s.Require().Equal(fmt.Sprintf("error starting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
+	s.Require().Equal(fmt.Sprintf("starting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
 }
 
 func (s *RepoTestSuite) TestListRepositories() {
 	s.Fixtures.PoolMgrCtrlMock.On("GetRepoPoolManager", mock.AnythingOfType("params.Repository")).Return(s.Fixtures.PoolMgrMock, nil)
 	s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil)
 
-	repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext, params.RepositoryFilter{})
+	repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext)
 
 	s.Require().Nil(err)
 	garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreRepos), repos)
 }
 
-func (s *RepoTestSuite) TestListRepositoriesWithFilters() {
-	s.Fixtures.PoolMgrCtrlMock.On("GetRepoPoolManager", mock.AnythingOfType("params.Repository")).Return(s.Fixtures.PoolMgrMock, nil)
-	s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil)
-
-	repo, err := s.Fixtures.Store.CreateRepository(
-		s.Fixtures.AdminContext,
-		"example-owner",
-		"example-repo",
-		s.testCreds,
-		"test-webhook-secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	if err != nil {
-		s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err))
-	}
-
-	repo2, err := s.Fixtures.Store.CreateRepository(
-		s.Fixtures.AdminContext,
-		"another-example-owner",
-		"example-repo",
-		s.testCreds,
-		"test-webhook-secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	if err != nil {
-		s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err))
-	}
-
-	repo3, err := s.Fixtures.Store.CreateRepository(
-		s.Fixtures.AdminContext,
-		"example-owner",
-		"example-repo",
-		s.giteaTestCreds,
-		"test-webhook-secret",
-		params.PoolBalancerTypeRoundRobin,
-	)
-	if err != nil {
-		s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err))
-	}
-
-	repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext, params.RepositoryFilter{Name: "example-repo"})
-
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo, repo2, repo3}, repos)
-
-	repos, err = s.Runner.ListRepositories(
-		s.Fixtures.AdminContext,
-		params.RepositoryFilter{
-			Name:  "example-repo",
-			Owner: "example-owner",
-		},
-	)
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo, repo3}, repos)
-
-	repos, err = s.Runner.ListRepositories(
-		s.Fixtures.AdminContext,
-		params.RepositoryFilter{
-			Name:     "example-repo",
-			Owner:    "example-owner",
-			Endpoint: s.giteaEndpoint.Name,
-		},
-	)
-	s.Require().Nil(err)
-	garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo3}, repos)
-}
-
 func (s *RepoTestSuite) TestListRepositoriesErrUnauthorized() {
-	_, err := s.Runner.ListRepositories(context.Background(), params.RepositoryFilter{})
+	_, err := s.Runner.ListRepositories(context.Background())
 
 	s.Require().Equal(runnerErrors.ErrUnauthorized, err)
 }
@@ -361,7 +289,7 @@ func (s *RepoTestSuite) TestDeleteRepository() {
 	s.Require().Nil(err)
 
 	_, err = s.Fixtures.Store.GetRepositoryByID(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID)
-	s.Require().Equal("error fetching repo: not found", err.Error())
+	s.Require().Equal("fetching repo: not found", err.Error())
 }
 
 func (s *RepoTestSuite) TestDeleteRepositoryErrUnauthorized() {
@@ -371,9 +299,9 @@ func (s *RepoTestSuite) TestDeleteRepositoryErrUnauthorized() {
 }
 
 func (s *RepoTestSuite) TestDeleteRepositoryPoolDefinedFailed() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -391,7 +319,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPoolMgrFailed() {
 	err := s.Runner.DeleteRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, true)
 
 	s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T())
-	s.Require().Equal(fmt.Sprintf("error deleting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
+	s.Require().Equal(fmt.Sprintf("deleting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
 }
 
 func (s *RepoTestSuite) TestUpdateRepository() {
@@ -445,7 +373,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryPoolMgrFailed() {
 	_, err := s.Runner.UpdateRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, s.Fixtures.UpdateRepoParams)
 
 	s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T())
-	s.Require().Equal(fmt.Sprintf("error getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
+	s.Require().Equal(fmt.Sprintf("getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
 }
 
 func (s *RepoTestSuite) TestUpdateRepositoryCreateRepoPoolMgrFailed() {
@@ -454,7 +382,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryCreateRepoPoolMgrFailed() {
 	_, err := s.Runner.UpdateRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, s.Fixtures.UpdateRepoParams)
 
 	s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T())
-	s.Require().Equal(fmt.Sprintf("error getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
+	s.Require().Equal(fmt.Sprintf("getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error())
 }
 
 func (s *RepoTestSuite) TestCreateRepoPool() {
@@ -491,9 +419,9 @@ func (s *RepoTestSuite) TestCreateRepoPoolFetchPoolParamsFailed() {
 }
 
 func (s *RepoTestSuite) TestGetRepoPoolByID() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	repoPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -513,9 +441,9 @@ func (s *RepoTestSuite) TestGetRepoPoolByIDErrUnauthorized() {
 }
 
 func (s *RepoTestSuite) TestDeleteRepoPool() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -527,7 +455,7 @@ func (s *RepoTestSuite) TestDeleteRepoPool() {
 	s.Require().Nil(err)
 
 	_, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID)
-	s.Require().Equal("fetching pool: error finding pool: not found", err.Error())
+	s.Require().Equal("fetching pool: finding pool: not found", err.Error())
 }
 
 func (s *RepoTestSuite) TestDeleteRepoPoolErrUnauthorized() {
@@ -537,9 +465,9 @@ func (s *RepoTestSuite) TestDeleteRepoPoolRunnersFailed() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -556,9 +484,9 @@ func (s *RepoTestSuite) TestDeleteRepoPoolRunnersFailed() {
 }
 
 func (s *RepoTestSuite) TestListRepoPools() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	repoPools := []params.Pool{}
 	for i := 1; i <= 2; i++ {
@@ -583,9 +511,9 @@ func (s *RepoTestSuite) TestListRepoPoolsErrUnauthorized() {
 }
 
 func (s *RepoTestSuite) TestListPoolInstances() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -614,9 +542,9 @@ func (s *RepoTestSuite) TestListPoolInstancesErrUnauthorized() {
 }
 
 func (s *RepoTestSuite) TestUpdateRepoPool() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	repoPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -637,9 +565,9 @@ func (s *RepoTestSuite) TestUpdateRepoPoolErrUnauthorized() {
 }
 
 func (s *RepoTestSuite) TestUpdateRepoPoolMinIdleGreaterThanMax() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
@@ -656,9 +584,9 @@ func (s *RepoTestSuite) TestListRepoInstances() {
-	entity := params.ForgeEntity{
+	entity := params.GithubEntity{
 		ID:         s.Fixtures.StoreRepos["test-repo-1"].ID,
-		EntityType: params.ForgeEntityTypeRepository,
+		EntityType: params.GithubEntityTypeRepository,
 	}
 	pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams)
 	if err != nil {
diff --git a/runner/runner.go b/runner/runner.go
index bf081522..5c0883aa 100644
--- a/runner/runner.go
+++ b/runner/runner.go
@@ -21,7 +21,6 @@ import (
 	"crypto/sha256"
 	"encoding/hex"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"hash"
 	"log/slog"
@@ -31,6 +30,9 @@ import (
 	"sync"
 	"time"
 
+	"github.com/juju/clock"
+	"github.com/juju/retry"
+	"github.com/pkg/errors"
 	"golang.org/x/sync/errgroup"
 
 	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
@@ -43,19 +45,17 @@ import (
 	"github.com/cloudbase/garm/runner/common"
 	"github.com/cloudbase/garm/runner/pool"
 	"github.com/cloudbase/garm/runner/providers"
-	"github.com/cloudbase/garm/util/github"
-	"github.com/cloudbase/garm/util/github/scalesets"
 )
 
 func NewRunner(ctx context.Context, cfg config.Config, db dbCommon.Store) (*Runner, error) {
 	ctrlID, err := db.ControllerInfo()
 	if err != nil {
-		return nil, fmt.Errorf("error fetching controller info: %w", err)
+		return nil, errors.Wrap(err, "fetching controller info")
 	}
 
 	providers, err := providers.LoadProvidersFromConfig(ctx, cfg, ctrlID.ControllerID.String())
 	if err != nil {
-		return nil, fmt.Errorf("error loading providers: %w", err)
+		return nil, errors.Wrap(err, "loading providers")
 	}
 
 	creds := map[string]config.Github{}
@@ -80,7 +80,7 @@ func NewRunner(ctx context.Context, cfg config.Config, db dbCommon.Store) (*Runn
 	}
 
 	if err := runner.loadReposOrgsAndEnterprises(); err != nil {
-		return nil, fmt.Errorf("error loading pool managers: %w", err)
+		return nil, errors.Wrap(err, "loading pool managers")
 	}
 
 	return runner, nil
@@ -103,16 +103,16 @@ func (p *poolManagerCtrl) CreateRepoPoolManager(ctx context.Context, repo params
 	entity, err := repo.GetEntity()
 	if err != nil {
-		return nil, fmt.Errorf("error getting entity: %w", err)
+		return nil, errors.Wrap(err, "getting entity")
 	}
 	instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret)
 	if err != nil {
-		return nil, fmt.Errorf("error creating instance token getter: %w", err)
+		return nil, errors.Wrap(err, "creating instance token getter")
 	}
 	poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store)
 	if err != nil {
-		return nil, fmt.Errorf("error creating repo pool manager: %w", err)
+		return nil, errors.Wrap(err, "creating repo pool manager")
 	}
 	p.repositories[repo.ID] = poolManager
 	return poolManager, nil
@@ -122,7 +122,7 @@ func (p *poolManagerCtrl) GetRepoPoolManager(repo params.Repository) (common.Poo
 	if repoPoolMgr, ok := p.repositories[repo.ID]; ok {
 		return repoPoolMgr, nil
 	}
-	return nil, fmt.Errorf("repository %s/%s pool manager not loaded: %w", repo.Owner, repo.Name, runnerErrors.ErrNotFound)
+	return nil, errors.Wrapf(runnerErrors.ErrNotFound, "repository %s/%s pool manager not loaded", repo.Owner, repo.Name)
 }
 
 func (p *poolManagerCtrl) DeleteRepoPoolManager(repo params.Repository) error {
@@ -132,7 +132,7 @@ func (p *poolManagerCtrl) DeleteRepoPoolManager(repo params.Repository) error {
 	poolMgr, ok := p.repositories[repo.ID]
 	if ok {
 		if err := poolMgr.Stop(); err != nil {
-			return fmt.Errorf("error stopping repo pool manager: %w", err)
+			return errors.Wrap(err, "stopping repo pool manager")
 		}
 		delete(p.repositories, repo.ID)
 	}
@@ -149,16 +149,16 @@ func (p *poolManagerCtrl) CreateOrgPoolManager(ctx context.Context, org params.O
 	entity, err := org.GetEntity()
 	if err != nil {
-		return nil, fmt.Errorf("error getting entity: %w", err)
+		return nil, errors.Wrap(err, "getting entity")
 	}
 	instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret)
 	if err != nil {
-		return nil, fmt.Errorf("error creating instance token getter: %w", err)
+		return nil, errors.Wrap(err, "creating instance token getter")
 	}
 	poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store)
 	if err != nil {
-		return nil, fmt.Errorf("error creating org pool manager: %w", err)
+		return nil, errors.Wrap(err, "creating org pool manager")
 	}
 	p.organizations[org.ID] = poolManager
 	return poolManager, nil
@@ -168,7 +168,7 @@ func (p *poolManagerCtrl) GetOrgPoolManager(org params.Organization) (common.Poo
 	if orgPoolMgr, ok := p.organizations[org.ID]; ok {
 		return orgPoolMgr, nil
 	}
-	return nil, fmt.Errorf("organization %s pool manager not loaded: %w", org.Name, runnerErrors.ErrNotFound)
+	return nil, errors.Wrapf(runnerErrors.ErrNotFound, "organization %s pool manager not loaded", org.Name)
 }
 
 func (p *poolManagerCtrl) DeleteOrgPoolManager(org params.Organization) error {
@@ -178,7 +178,7 @@ func (p *poolManagerCtrl) DeleteOrgPoolManager(org params.Organization) error {
 	poolMgr, ok := p.organizations[org.ID]
 	if ok {
 		if err := poolMgr.Stop(); err != nil {
-			return fmt.Errorf("error stopping org pool manager: %w", err)
+			return errors.Wrap(err, "stopping org pool manager")
 		}
 		delete(p.organizations, org.ID)
 	}
@@ -195,16 +195,16 @@ func (p *poolManagerCtrl) CreateEnterprisePoolManager(ctx context.Context, enter
 	entity, err := enterprise.GetEntity()
 	if err != nil {
-		return nil, fmt.Errorf("error getting entity: %w", err)
+		return nil, errors.Wrap(err, "getting entity")
 	}
 	instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret)
 	if err != nil {
-		return nil, fmt.Errorf("error creating instance token getter: %w", err)
+		return nil, errors.Wrap(err, "creating instance token getter")
 	}
 	poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store)
 	if err != nil {
-		return nil, fmt.Errorf("error creating enterprise pool manager: %w", err)
+		return nil, errors.Wrap(err, "creating enterprise pool manager")
 	}
 	p.enterprises[enterprise.ID] = poolManager
 	return poolManager, nil
@@ -214,7 +214,7 @@ func (p *poolManagerCtrl) GetEnterprisePoolManager(enterprise params.Enterprise)
 	if enterprisePoolMgr, ok := p.enterprises[enterprise.ID]; ok {
 		return enterprisePoolMgr, nil
 	}
-	return nil, fmt.Errorf("enterprise %s pool manager not loaded: %w", enterprise.Name, runnerErrors.ErrNotFound)
+	return nil, errors.Wrapf(runnerErrors.ErrNotFound, "enterprise %s pool manager not loaded", enterprise.Name)
 }
 
 func (p *poolManagerCtrl) DeleteEnterprisePoolManager(enterprise params.Enterprise) error {
@@ -224,7 +224,7 @@ func (p *poolManagerCtrl) DeleteEnterprisePoolManager(enterprise params.Enterpri
 	poolMgr, ok := p.enterprises[enterprise.ID]
 	if ok {
 		if err := poolMgr.Stop(); err != nil {
-			return fmt.Errorf("error stopping enterprise pool manager: %w", err)
+			return errors.Wrap(err, "stopping enterprise pool manager")
 		}
 		delete(p.enterprises, enterprise.ID)
 	}
@@ -254,12 +254,12 @@ func (r *Runner) UpdateController(ctx context.Context, param params.UpdateContro
 	}
 
 	if err := param.Validate(); err != nil {
-		return params.ControllerInfo{}, fmt.Errorf("error validating controller update params: %w", err)
+		return params.ControllerInfo{}, errors.Wrap(err, "validating controller update params")
 	}
 
 	info, err := r.store.UpdateController(param)
 	if err != nil {
-		return params.ControllerInfo{}, fmt.Errorf("error updating controller info: %w", err)
+		return params.ControllerInfo{}, errors.Wrap(err, "updating controller info")
 	}
 	return info, nil
 }
@@ -279,26 +279,26 @@ func (r *Runner) GetControllerInfo(ctx context.Context) (params.ControllerInfo,
 	// As a side note, Windows requires a reboot for the hostname change to take effect,
 	// so if we'll ever support Windows as a target system, the hostname can be cached.
 	var hostname string
-	var err error
-	for range 10 {
-		hostname, err = os.Hostname()
-		if err != nil {
-			select {
-			case <-time.After(10 * time.Millisecond):
-				continue
-			case <-ctx.Done():
+	err := retry.Call(retry.CallArgs{
+		Func: func() error {
+			var err error
+			hostname, err = os.Hostname()
+			if err != nil {
+				return errors.Wrap(err, "fetching hostname")
 			}
-			return params.ControllerInfo{}, fmt.Errorf("error fetching hostname: %w", err)
-		}
-		break
-	}
+			return nil
+		},
+		Attempts: 10,
+		Delay:    100 * time.Millisecond,
+		Clock:    clock.WallClock,
+	})
 	if err != nil {
-		return params.ControllerInfo{}, fmt.Errorf("error fetching hostname: %w", err)
+		return params.ControllerInfo{}, errors.Wrap(err, "fetching hostname")
 	}
 
 	info, err := r.store.ControllerInfo()
 	if err != nil {
-		return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", err)
+		return params.ControllerInfo{}, errors.Wrap(err, "fetching controller info")
 	}
 
 	// This is temporary. Right now, GARM is a single-instance deployment. When we add the
@@ -325,19 +325,19 @@ func (r *Runner) loadReposOrgsAndEnterprises() error {
 	r.mux.Lock()
 	defer r.mux.Unlock()
 
-	repos, err := r.store.ListRepositories(r.ctx, params.RepositoryFilter{})
+	repos, err := r.store.ListRepositories(r.ctx)
 	if err != nil {
-		return fmt.Errorf("error fetching repositories: %w", err)
+		return errors.Wrap(err, "fetching repositories")
 	}
 
-	orgs, err := r.store.ListOrganizations(r.ctx, params.OrganizationFilter{})
+	orgs, err := r.store.ListOrganizations(r.ctx)
 	if err != nil {
-		return fmt.Errorf("error fetching organizations: %w", err)
+		return errors.Wrap(err, "fetching organizations")
 	}
 
-	enterprises, err := r.store.ListEnterprises(r.ctx, params.EnterpriseFilter{})
+	enterprises, err := r.store.ListEnterprises(r.ctx)
 	if err != nil {
-		return fmt.Errorf("error fetching enterprises: %w", err)
+		return errors.Wrap(err, "fetching enterprises")
 	}
 
 	g, _ := errgroup.WithContext(r.ctx)
@@ -382,17 +382,17 @@ func (r *Runner) Start() error {
 	repositories, err := r.poolManagerCtrl.GetRepoPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetch repo pool managers: %w", err)
+		return errors.Wrap(err, "fetch repo pool managers")
 	}
 
 	organizations, err := r.poolManagerCtrl.GetOrgPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetch org pool managers: %w", err)
+		return errors.Wrap(err, "fetch org pool managers")
 	}
 
 	enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetch enterprise pool managers: %w", err)
+		return errors.Wrap(err, "fetch enterprise pool managers")
 	}
 
 	g, _ := errgroup.WithContext(r.ctx)
@@ -448,17 +448,17 @@ func (r *Runner) Stop() error {
 	repos, err := r.poolManagerCtrl.GetRepoPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching repo pool managers: %w", err)
+		return errors.Wrap(err, "fetch repo pool managers")
 	}
 
 	orgs, err := r.poolManagerCtrl.GetOrgPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching org pool managers: %w", err)
+		return errors.Wrap(err, "fetch org pool managers")
 	}
 
 	enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching enterprise pool managers: %w", err)
+		return errors.Wrap(err, "fetch enterprise pool managers")
 	}
 
 	g, _ := errgroup.WithContext(r.ctx)
@@ -510,17 +510,17 @@ func (r *Runner) Wait() error {
 	repos, err := r.poolManagerCtrl.GetRepoPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching repo pool managers: %w", err)
+		return errors.Wrap(err, "fetch repo pool managers")
 	}
 
 	orgs, err := r.poolManagerCtrl.GetOrgPoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching org pool managers: %w", err)
+		return errors.Wrap(err, "fetch org pool managers")
 	}
 
 	enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers()
 	if err != nil {
-		return fmt.Errorf("error fetching enterprise pool managers: %w", err)
+		return errors.Wrap(err, "fetch enterprise pool managers")
 	}
 
 	for poolID, repo := range repos {
@@ -589,7 +589,7 @@ func (r *Runner) validateHookBody(signature, secret string, body []byte) error {
 	mac := hmac.New(hashFunc, []byte(secret))
 	_, err := mac.Write(body)
 	if err != nil {
-		return fmt.Errorf("failed to compute sha256: %w", err)
+		return errors.Wrap(err, "failed to compute sha256")
 	}
 	expectedMAC := hex.EncodeToString(mac.Sum(nil))
@@ -600,10 +600,10 @@ func (r *Runner) validateHookBody(signature, secret string, body []byte) error {
 	return nil
 }
 
-func (r *Runner) findEndpointForJob(job params.WorkflowJob, forgeType params.EndpointType) (params.ForgeEndpoint, error) {
+func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.GithubEndpoint, error) {
 	uri, err := url.ParseRequestURI(job.WorkflowJob.HTMLURL)
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error parsing job URL: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "parsing job URL")
 	}
 	baseURI := fmt.Sprintf("%s://%s", uri.Scheme, uri.Host)
@@ -612,46 +612,32 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob, forgeType params.End
 	// a GHES involved, those users will have just one extra endpoint or 2 (if they also have a
 	// test env). But there should be a relatively small number, regardless. So we don't really care
 	// that much about the performance of this function.
-	var endpoints []params.ForgeEndpoint
-	switch forgeType {
-	case params.GithubEndpointType:
-		endpoints, err = r.store.ListGithubEndpoints(r.ctx)
-	case params.GiteaEndpointType:
-		endpoints, err = r.store.ListGiteaEndpoints(r.ctx)
-	default:
-		return params.ForgeEndpoint{}, runnerErrors.NewBadRequestError("unknown forge type %s", forgeType)
-	}
-
+	endpoints, err := r.store.ListGithubEndpoints(r.ctx)
 	if err != nil {
-		return params.ForgeEndpoint{}, fmt.Errorf("error fetching github endpoints: %w", err)
+		return params.GithubEndpoint{}, errors.Wrap(err, "fetching github endpoints")
 	}
 
 	for _, ep := range endpoints {
-		slog.DebugContext(r.ctx, "checking endpoint", "base_uri", baseURI, "endpoint", ep.BaseURL)
-		epBaseURI := strings.TrimSuffix(ep.BaseURL, "/")
-		if epBaseURI == baseURI {
+		if ep.BaseURL == baseURI {
 			return ep, nil
 		}
 	}
 
-	return params.ForgeEndpoint{}, runnerErrors.NewNotFoundError("no endpoint found for job")
+	return params.GithubEndpoint{}, runnerErrors.NewNotFoundError("no endpoint found for job")
 }
 
-func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType params.EndpointType, jobData []byte) error {
+func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, jobData []byte) error {
 	if len(jobData) == 0 {
-		slog.ErrorContext(r.ctx, "missing job data")
 		return runnerErrors.NewBadRequestError("missing job data")
 	}
 
 	var job params.WorkflowJob
 	if err := json.Unmarshal(jobData, &job); err != nil {
-		slog.ErrorContext(r.ctx, "failed to unmarshal job data", "error", err)
-		return fmt.Errorf("invalid job data %s: %w", err, runnerErrors.ErrBadRequest)
+		return errors.Wrapf(runnerErrors.ErrBadRequest, "invalid job data: %s", err)
 	}
 
-	endpoint, err := r.findEndpointForJob(job, forgeType)
+	endpoint, err := r.findEndpointForJob(job)
 	if err != nil {
-		slog.ErrorContext(r.ctx, "failed to find endpoint for job", "error", err)
-		return fmt.Errorf("error finding endpoint for job: %w", err)
+		return errors.Wrap(err, "finding endpoint for job")
 	}
 
 	var poolManager common.PoolManager
@@ -661,44 +647,37 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType
 		slog.DebugContext(
 			r.ctx, "got hook for repo",
 			"repo_owner", util.SanitizeLogEntry(job.Repository.Owner.Login),
-			"repo_name", util.SanitizeLogEntry(job.Repository.Name),
-			"endpoint", endpoint.Name)
+			"repo_name", util.SanitizeLogEntry(job.Repository.Name))
 		poolManager, err = r.findRepoPoolManager(job.Repository.Owner.Login, job.Repository.Name, endpoint.Name)
 	case OrganizationHook:
 		slog.DebugContext(
 			r.ctx, "got hook for organization",
-			"organization", util.SanitizeLogEntry(job.GetOrgName(forgeType)),
-			"endpoint", endpoint.Name)
-		poolManager, err = r.findOrgPoolManager(job.GetOrgName(forgeType), endpoint.Name)
+			"organization", util.SanitizeLogEntry(job.Organization.Login))
+		poolManager, err = r.findOrgPoolManager(job.Organization.Login, endpoint.Name)
 	case EnterpriseHook:
 		slog.DebugContext(
 			r.ctx, "got hook for enterprise",
-			"enterprise", util.SanitizeLogEntry(job.Enterprise.Slug),
-			"endpoint", endpoint.Name)
+			"enterprise", util.SanitizeLogEntry(job.Enterprise.Slug))
 		poolManager, err = r.findEnterprisePoolManager(job.Enterprise.Slug, endpoint.Name)
 	default:
 		return runnerErrors.NewBadRequestError("cannot handle hook target type %s", hookTargetType)
 	}
-	slog.DebugContext(r.ctx, "found pool manager", "pool_manager", poolManager.ID())
 	if err != nil {
-		slog.ErrorContext(r.ctx, "failed to find pool manager", "error", err, "hook_target_type", hookTargetType)
 		// We don't have a repository or organization configured that
 		// can handle this workflow job.
-		return fmt.Errorf("error fetching poolManager: %w", err)
+		return errors.Wrap(err, "fetching poolManager")
 	}
 
 	// We found a pool. Validate the webhook job. If a secret is configured,
 	// we make sure that the source of this workflow job is valid.
 	secret := poolManager.WebhookSecret()
 	if err := r.validateHookBody(signature, secret, jobData); err != nil {
-		slog.ErrorContext(r.ctx, "failed to validate webhook data", "error", err)
-		return fmt.Errorf("error validating webhook data: %w", err)
+		return errors.Wrap(err, "validating webhook data")
 	}
 
 	if err := poolManager.HandleWorkflowJob(job); err != nil {
-		slog.ErrorContext(r.ctx, "failed to handle workflow job", "error", err)
-		return fmt.Errorf("error handling workflow job: %w", err)
+		return errors.Wrap(err, "handling workflow job")
 	}
 
 	return nil
@@ -731,9 +710,9 @@ func (r *Runner) GetInstance(ctx context.Context, instanceName string) (params.I
 		return params.Instance{}, runnerErrors.ErrUnauthorized
 	}
 
-	instance, err := r.store.GetInstance(ctx, instanceName)
+	instance, err := r.store.GetInstanceByName(ctx, instanceName)
 	if err != nil {
-		return params.Instance{}, fmt.Errorf("error fetching instance: %w", err)
+		return params.Instance{}, errors.Wrap(err, "fetching instance")
 	}
 	return instance, nil
 }
@@ -745,7 +724,7 @@ func (r *Runner) ListAllInstances(ctx context.Context) ([]params.Instance, error
 
 	instances, err := r.store.ListAllInstances(ctx)
 	if err != nil {
-		return nil, fmt.Errorf("error fetching instances: %w", err)
+		return nil, errors.Wrap(err, "fetching instances")
 	}
 	return instances, nil
 }
@@ -757,7 +736,7 @@ func (r *Runner) AddInstanceStatusMessage(ctx context.Context, param params.Inst
 	}
 
 	if err := r.store.AddInstanceEvent(ctx, instanceName, params.StatusEvent, params.EventInfo, param.Message); err != nil {
-		return fmt.Errorf("error adding status update: %w", err)
+		return errors.Wrap(err, "adding status update")
 	}
 
 	updateParams := params.UpdateInstanceParams{
@@ -769,7 +748,7 @@ func (r *Runner) AddInstanceStatusMessage(ctx context.Context, param params.Inst
 	}
 
 	if _, err := r.store.UpdateInstance(r.ctx, instanceName, updateParams); err != nil {
-		return fmt.Errorf("error updating runner agent ID: %w", err)
+		return errors.Wrap(err, "updating runner agent ID")
 	}
 
 	return nil
@@ -797,7 +776,7 @@ func (r *Runner) UpdateSystemInfo(ctx context.Context, param params.UpdateSystem
 	}
 
 	if _, err := r.store.UpdateInstance(r.ctx, instanceName, updateParams); err != nil {
-		return fmt.Errorf("error updating runner system info: %w", err)
+		return errors.Wrap(err, "updating runner system info")
 	}
 
 	return nil
@@ -806,7 +785,7 @@ func (r *Runner) UpdateSystemInfo(ctx context.Context, param params.UpdateSystem
 func (r *Runner) getPoolManagerFromInstance(ctx context.Context, instance params.Instance) (common.PoolManager, error) {
 	pool, err := r.store.GetPoolByID(ctx, instance.PoolID)
 	if err != nil {
-		return nil, fmt.Errorf("error fetching pool: %w", err)
+		return nil, errors.Wrap(err, "fetching pool")
 	}
 
 	var poolMgr common.PoolManager
@@ -815,29 +794,29 @@ func (r *Runner) getPoolManagerFromInstance(ctx context.Context, instance params
 	case pool.RepoID != "":
 		repo, err := r.store.GetRepositoryByID(ctx, pool.RepoID)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching repo: %w", err)
+			return nil, errors.Wrap(err, "fetching repo")
 		}
 		poolMgr, err = r.findRepoPoolManager(repo.Owner, repo.Name, repo.Endpoint.Name)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching pool manager for repo %s: %w", pool.RepoName, err)
+			return nil, errors.Wrapf(err, "fetching pool manager for repo %s", pool.RepoName)
 		}
 	case pool.OrgID != "":
 		org, err := r.store.GetOrganizationByID(ctx, pool.OrgID)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching org: %w", err)
+			return nil, errors.Wrap(err, "fetching org")
 		}
 		poolMgr, err = r.findOrgPoolManager(org.Name, org.Endpoint.Name)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching pool manager for org %s: %w", pool.OrgName, err)
+			return nil, errors.Wrapf(err, "fetching pool manager for org %s", pool.OrgName)
 		}
 	case pool.EnterpriseID != "":
 		enterprise, err := r.store.GetEnterpriseByID(ctx, pool.EnterpriseID)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching enterprise: %w", err)
+			return nil, errors.Wrap(err, "fetching enterprise")
 		}
 		poolMgr, err = r.findEnterprisePoolManager(enterprise.Name, enterprise.Endpoint.Name)
 		if err != nil {
-			return nil, fmt.Errorf("error fetching pool manager for enterprise %s: %w", pool.EnterpriseName, err)
+			return nil, errors.Wrapf(err, "fetching pool manager for enterprise %s", pool.EnterpriseName)
 		}
 	}
 
@@ -852,9 +831,9 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel
 		return runnerErrors.ErrUnauthorized
 	}
 
-	instance, err := r.store.GetInstance(ctx, instanceName)
+	instance, err := r.store.GetInstanceByName(ctx, instanceName)
 	if err != nil {
-		return fmt.Errorf("error fetching instance: %w", err)
+		return errors.Wrap(err, "fetching instance")
 	}
 
 	switch instance.Status {
@@ -870,98 +849,13 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel
 		return runnerErrors.NewBadRequestError("runner must be in one of the following states: %q", strings.Join(validStates, ", "))
 	}
 
-	ghCli, ssCli, err := r.getGHCliFromInstance(ctx, instance)
+	poolMgr, err := r.getPoolManagerFromInstance(ctx, instance)
 	if err != nil {
-		return fmt.Errorf("error fetching github client: %w", err)
+		return errors.Wrap(err, "fetching pool manager for instance")
 	}
 
-	if instance.AgentID != 0 {
-		switch {
-		case instance.ScaleSetID != 0:
-			err = ssCli.RemoveRunner(ctx, instance.AgentID)
-		case instance.PoolID != "":
-			err = ghCli.RemoveEntityRunner(ctx, instance.AgentID)
-		default:
-			return errors.New("instance does not have a pool or scale set")
-		}
-
-		if err != nil {
-			if !errors.Is(err, runnerErrors.ErrNotFound) {
-				if errors.Is(err, runnerErrors.ErrUnauthorized) && instance.PoolID != "" {
-					poolMgr, err := r.getPoolManagerFromInstance(ctx, instance)
-					if err != nil {
-						return fmt.Errorf("error fetching pool manager for instance: %w", err)
-					}
-					poolMgr.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err))
-				}
-				if !bypassGithubUnauthorized {
-					return fmt.Errorf("error removing runner from github: %w", err)
-				}
-			}
-		}
+	if err := poolMgr.DeleteRunner(instance, forceDelete, bypassGithubUnauthorized); err != nil {
+		return errors.Wrap(err, "removing runner")
 	}
-
-	instanceStatus := commonParams.InstancePendingDelete
-	if forceDelete {
-		instanceStatus = commonParams.InstancePendingForceDelete
-	}
-
-	slog.InfoContext(
-		r.ctx, "setting instance status",
-		"runner_name", instance.Name,
-		"status", instanceStatus)
-
-	updateParams := params.UpdateInstanceParams{
-		Status: instanceStatus,
-	}
-	_, err = r.store.UpdateInstance(r.ctx, instance.Name, updateParams)
-	if err != nil {
-		return fmt.Errorf("error updating runner state: %w", err)
-	}
 	return nil
 }
-
-func (r *Runner) getGHCliFromInstance(ctx context.Context, instance params.Instance) (common.GithubClient, *scalesets.ScaleSetClient, error) {
-	// nolint:golangci-lint,godox
-	// TODO(gabriel-samfira): We can probably cache the entity.
-	var entityGetter params.EntityGetter
-	var err error
-
-	switch {
-	case instance.PoolID != "":
-		entityGetter, err = r.store.GetPoolByID(ctx, instance.PoolID)
-		if err != nil {
-			return nil, nil, fmt.Errorf("error fetching pool: %w", err)
-		}
-	case instance.ScaleSetID != 0:
-		entityGetter, err = r.store.GetScaleSetByID(ctx, instance.ScaleSetID)
-		if err != nil {
-			return nil, nil, fmt.Errorf("error fetching scale set: %w", err)
-		}
-	default:
-		return nil, nil, errors.New("instance does not have a pool or scale set")
-	}
-
-	entity, err := entityGetter.GetEntity()
-	if err != nil {
-		return nil, nil, fmt.Errorf("error fetching entity: %w", err)
-	}
-
-	// Fetching the entity from the database will populate all fields, including credentials.
-	entity, err = r.store.GetForgeEntity(ctx, entity.EntityType, entity.ID)
-	if err != nil {
-		return nil, nil, fmt.Errorf("error fetching entity: %w", err)
-	}
-
-	ghCli, err := github.Client(ctx, entity)
-	if err != nil {
-		return nil, nil, fmt.Errorf("error creating github client: %w", err)
-	}
-
-	scaleSetCli, err := scalesets.NewClient(ghCli)
-	if err != nil {
-		return nil, nil, fmt.Errorf("error creating scaleset client: %w", err)
-	}
-	return ghCli, scaleSetCli, nil
-}
diff --git a/runner/scalesets.go b/runner/scalesets.go
deleted file mode 100644
index 136ddec2..00000000
--- a/runner/scalesets.go
+++ /dev/null
@@ -1,297 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
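For readers following the webhook path in the hunks above (DispatchWorkflowJob → validateHookBody): GARM validates a hook by recomputing the HMAC over the raw payload with the pool's webhook secret and comparing it to the signature GitHub sends. A minimal sketch of that scheme; the `X-Hub-Signature-256`-style "sha256=<hex>" prefix and the constant-time compare are assumptions based on GitHub's documented webhook format, not code shown in this diff:

	package main

	import (
		"crypto/hmac"
		"crypto/sha256"
		"encoding/hex"
		"fmt"
	)

	// validBody recomputes the HMAC-SHA256 of body under secret and compares it
	// against a GitHub-style signature header of the form "sha256=<hex digest>".
	func validBody(signature, secret string, body []byte) bool {
		mac := hmac.New(sha256.New, []byte(secret))
		mac.Write(body) // hash.Hash writes never return an error
		expected := "sha256=" + hex.EncodeToString(mac.Sum(nil))
		// hmac.Equal compares in constant time, avoiding timing side channels.
		return hmac.Equal([]byte(expected), []byte(signature))
	}

	func main() {
		secret := "test-webhook-secret"
		body := []byte(`{"action":"queued"}`)

		mac := hmac.New(sha256.New, []byte(secret))
		mac.Write(body)
		sig := "sha256=" + hex.EncodeToString(mac.Sum(nil))

		fmt.Println(validBody(sig, secret, body))         // true
		fmt.Println(validBody(sig, "wrong-secret", body)) // false
	}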
-
-package runner
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"log/slog"
-
-	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
-	"github.com/cloudbase/garm/auth"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/util/appdefaults"
-	"github.com/cloudbase/garm/util/github"
-	"github.com/cloudbase/garm/util/github/scalesets"
-)
-
-func (r *Runner) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) {
-	if !auth.IsAdmin(ctx) {
-		return []params.ScaleSet{}, runnerErrors.ErrUnauthorized
-	}
-
-	scalesets, err := r.store.ListAllScaleSets(ctx)
-	if err != nil {
-		return nil, fmt.Errorf("error fetching pools: %w", err)
-	}
-	return scalesets, nil
-}
-
-func (r *Runner) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) {
-	if !auth.IsAdmin(ctx) {
-		return params.ScaleSet{}, runnerErrors.ErrUnauthorized
-	}
-
-	set, err := r.store.GetScaleSetByID(ctx, scaleSet)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err)
-	}
-	return set, nil
-}
-
-func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error {
-	if !auth.IsAdmin(ctx) {
-		return runnerErrors.ErrUnauthorized
-	}
-
-	scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID)
-	if err != nil {
-		if !errors.Is(err, runnerErrors.ErrNotFound) {
-			return fmt.Errorf("error fetching scale set: %w", err)
-		}
-		return nil
-	}
-
-	if len(scaleSet.Instances) > 0 {
-		return runnerErrors.NewBadRequestError("scale set has runners")
-	}
-
-	if scaleSet.Enabled {
-		return runnerErrors.NewBadRequestError("scale set is enabled; disable it first")
-	}
-
-	paramEntity, err := scaleSet.GetEntity()
-	if err != nil {
-		return fmt.Errorf("error getting entity: %w", err)
-	}
-
-	entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID)
-	if err != nil {
-		return fmt.Errorf("error getting entity: %w", err)
-	}
-
-	ghCli, err := github.Client(ctx, entity)
-	if err != nil {
-		return fmt.Errorf("error creating github client: %w", err)
-	}
-
-	scalesetCli, err := scalesets.NewClient(ghCli)
-	if err != nil {
-		return fmt.Errorf("error getting scaleset client: %w", err)
-	}
-
-	slog.DebugContext(ctx, "deleting scale set", "scale_set_id", scaleSet.ScaleSetID)
-	if err := scalesetCli.DeleteRunnerScaleSet(ctx, scaleSet.ScaleSetID); err != nil {
-		if !errors.Is(err, runnerErrors.ErrNotFound) {
-			slog.InfoContext(ctx, "scale set not found", "scale_set_id", scaleSet.ScaleSetID)
-			return nil
-		}
-		slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete scale set from github")
-		return fmt.Errorf("error deleting scale set from github: %w", err)
-	}
-	if err := r.store.DeleteScaleSetByID(ctx, scaleSetID); err != nil {
-		return fmt.Errorf("error deleting scale set: %w", err)
-	}
-	return nil
-}
-
-func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param params.UpdateScaleSetParams) (params.ScaleSet, error) {
-	if !auth.IsAdmin(ctx) {
-		return params.ScaleSet{}, runnerErrors.ErrUnauthorized
-	}
-
-	scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err)
-	}
-
-	maxRunners := scaleSet.MaxRunners
-	minIdleRunners := scaleSet.MinIdleRunners
-
-	if param.MaxRunners != nil {
-		maxRunners = *param.MaxRunners
-	}
-	if param.MinIdleRunners != nil {
-		minIdleRunners = *param.MinIdleRunners
-	}
-
-	if param.RunnerBootstrapTimeout != nil && *param.RunnerBootstrapTimeout == 0 {
-		return params.ScaleSet{}, runnerErrors.NewBadRequestError("runner_bootstrap_timeout cannot be 0")
-	}
-
-	if minIdleRunners > maxRunners {
-		return params.ScaleSet{}, runnerErrors.NewBadRequestError("min_idle_runners cannot be larger than max_runners")
-	}
-
-	paramEntity, err := scaleSet.GetEntity()
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err)
-	}
-
-	entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err)
-	}
-
-	ghCli, err := github.Client(ctx, entity)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error creating github client: %w", err)
-	}
-
-	scalesetCli, err := scalesets.NewClient(ghCli)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error getting scaleset client: %w", err)
-	}
-
-	callback := func(old, newSet params.ScaleSet) error {
-		updateParams := params.RunnerScaleSet{}
-		hasUpdates := false
-		if old.Name != newSet.Name {
-			updateParams.Name = newSet.Name
-			hasUpdates = true
-		}
-
-		if old.GitHubRunnerGroup != newSet.GitHubRunnerGroup {
-			runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, newSet.GitHubRunnerGroup)
-			if err != nil {
-				return fmt.Errorf("error fetching runner group from github: %w", err)
-			}
-			updateParams.RunnerGroupID = runnerGroup.ID
-			hasUpdates = true
-		}
-
-		if old.DisableUpdate != newSet.DisableUpdate {
-			updateParams.RunnerSetting.DisableUpdate = newSet.DisableUpdate
-			hasUpdates = true
-		}
-
-		if hasUpdates {
-			_, err := scalesetCli.UpdateRunnerScaleSet(ctx, newSet.ScaleSetID, updateParams)
-			if err != nil {
-				return fmt.Errorf("failed to update scaleset in github: %w", err)
-			}
-		}
-		return nil
-	}
-
-	newScaleSet, err := r.store.UpdateEntityScaleSet(ctx, entity, scaleSetID, param, callback)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error updating pool: %w", err)
-	}
-	return newScaleSet, nil
-}
-
-func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.ForgeEntityType, entityID string, param params.CreateScaleSetParams) (scaleSetRet params.ScaleSet, err error) {
-	if !auth.IsAdmin(ctx) {
-		return params.ScaleSet{}, runnerErrors.ErrUnauthorized
-	}
-
-	if param.RunnerBootstrapTimeout == 0 {
-		param.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout
-	}
-
-	if param.GitHubRunnerGroup == "" {
-		param.GitHubRunnerGroup = "Default"
-	}
-
-	entity, err := r.store.GetForgeEntity(ctx, entityType, entityID)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err)
-	}
-
-	if entity.Credentials.ForgeType != params.GithubEndpointType {
-		return params.ScaleSet{}, runnerErrors.NewBadRequestError("scale sets are only supported for github entities")
-	}
-
-	ghCli, err := github.Client(ctx, entity)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error creating github client: %w", err)
-	}
-
-	scalesetCli, err := scalesets.NewClient(ghCli)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error getting scaleset client: %w", err)
-	}
-
-	runnerGroupID, err := ghCli.GetEntityRunnerGroupIDByName(ctx, param.GitHubRunnerGroup)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("failed to get github runner group for entity %s: %w", entity.ID, err)
-	}
-
-	createParam := &params.RunnerScaleSet{
-		Name:          param.Name,
-		RunnerGroupID: runnerGroupID,
-		Labels: []params.Label{
-			{
-				Name: param.Name,
-				Type: "System",
-			},
-		},
-		RunnerSetting: params.RunnerSetting{
-			Ephemeral:     true,
-			DisableUpdate: param.DisableUpdate,
-		},
-		Enabled: &param.Enabled,
-	}
-
-	runnerScaleSet, err := scalesetCli.CreateRunnerScaleSet(ctx, createParam)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error creating runner scale set: %w", err)
-	}
-
-	defer func() {
-		if err != nil {
-			if innerErr := scalesetCli.DeleteRunnerScaleSet(ctx, runnerScaleSet.ID); innerErr != nil {
-				slog.With(slog.Any("error", innerErr)).ErrorContext(ctx, "failed to cleanup scale set")
-			}
-		}
-	}()
-	param.ScaleSetID = runnerScaleSet.ID
-
-	scaleSet, err := r.store.CreateEntityScaleSet(ctx, entity, param)
-	if err != nil {
-		return params.ScaleSet{}, fmt.Errorf("error creating scale set: %w", err)
-	}
-
-	return scaleSet, nil
-}
-
-func (r *Runner) ListScaleSetInstances(ctx context.Context, scalesetID uint) ([]params.Instance, error) {
-	if !auth.IsAdmin(ctx) {
-		return nil, runnerErrors.ErrUnauthorized
-	}
-
-	instances, err := r.store.ListScaleSetInstances(ctx, scalesetID)
-	if err != nil {
-		return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err)
-	}
-	return instances, nil
-}
-
-func (r *Runner) ListEntityScaleSets(ctx context.Context, entityType params.ForgeEntityType, entityID string) ([]params.ScaleSet, error) {
-	if !auth.IsAdmin(ctx) {
-		return []params.ScaleSet{}, runnerErrors.ErrUnauthorized
-	}
-	entity := params.ForgeEntity{
-		ID:         entityID,
-		EntityType: entityType,
-	}
-	scaleSets, err := r.store.ListEntityScaleSets(ctx, entity)
-	if err != nil {
-		return nil, fmt.Errorf("error fetching scale sets: %w", err)
-	}
-	return scaleSets, nil
-}
diff --git a/test/integration/client_utils.go b/test/integration/client_utils.go
index e423c107..a0f17893 100644
--- a/test/integration/client_utils.go
+++ b/test/integration/client_utils.go
@@ -1,17 +1,3 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
- package integration import ( @@ -65,7 +51,7 @@ func listCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfo return listCredentialsResponse.Payload, nil } -func createGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsParams params.CreateGithubCredentialsParams) (*params.ForgeCredentials, error) { +func createGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsParams params.CreateGithubCredentialsParams) (*params.GithubCredentials, error) { createCredentialsResponse, err := apiCli.Credentials.CreateCredentials( clientCredentials.NewCreateCredentialsParams().WithBody(credentialsParams), apiAuthToken) @@ -81,7 +67,7 @@ func deleteGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.Client apiAuthToken) } -func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsID int64, credentialsParams params.UpdateGithubCredentialsParams) (*params.ForgeCredentials, error) { +func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsID int64, credentialsParams params.UpdateGithubCredentialsParams) (*params.GithubCredentials, error) { updateCredentialsResponse, err := apiCli.Credentials.UpdateCredentials( clientCredentials.NewUpdateCredentialsParams().WithID(credentialsID).WithBody(credentialsParams), apiAuthToken) @@ -91,7 +77,7 @@ func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.Client return &updateCredentialsResponse.Payload, nil } -func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointParams params.CreateGithubEndpointParams) (*params.ForgeEndpoint, error) { +func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointParams params.CreateGithubEndpointParams) (*params.GithubEndpoint, error) { createEndpointResponse, err := apiCli.Endpoints.CreateGithubEndpoint( clientEndpoints.NewCreateGithubEndpointParams().WithBody(endpointParams), apiAuthToken) @@ -101,7 +87,7 @@ func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAut return &createEndpointResponse.Payload, nil } -func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter) (params.ForgeEndpoints, error) { +func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter) (params.GithubEndpoints, error) { listEndpointsResponse, err := apiCli.Endpoints.ListGithubEndpoints( clientEndpoints.NewListGithubEndpointsParams(), apiAuthToken) @@ -111,7 +97,7 @@ func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuth return listEndpointsResponse.Payload, nil } -func getGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string) (*params.ForgeEndpoint, error) { +func getGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string) (*params.GithubEndpoint, error) { getEndpointResponse, err := apiCli.Endpoints.GetGithubEndpoint( clientEndpoints.NewGetGithubEndpointParams().WithName(endpointName), apiAuthToken) @@ -127,7 +113,7 @@ func deleteGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAut apiAuthToken) } -func updateGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string, endpointParams params.UpdateGithubEndpointParams) (*params.ForgeEndpoint, error) { +func 
updateGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string, endpointParams params.UpdateGithubEndpointParams) (*params.GithubEndpoint, error) { updateEndpointResponse, err := apiCli.Endpoints.UpdateGithubEndpoint( clientEndpoints.NewUpdateGithubEndpointParams().WithName(endpointName).WithBody(endpointParams), apiAuthToken) diff --git a/test/integration/credentials_test.go b/test/integration/credentials_test.go index 9b9387f6..b83f131f 100644 --- a/test/integration/credentials_test.go +++ b/test/integration/credentials_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( @@ -31,14 +18,14 @@ func (suite *GarmSuite) TestGithubCredentialsErrorOnDuplicateCredentialsName() { creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec + suite.DeleteGithubCredential(int64(creds.ID)) }) createCredsParams := params.CreateGithubCredentialsParams{ Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -67,10 +54,10 @@ func (suite *GarmSuite) TestGithubCredentialsFailsToDeleteWhenInUse() { suite.NoError(err) t.Cleanup(func() { deleteRepo(suite.cli, suite.authToken, repo.ID) - deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) //nolint:gosec + deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) }) - err = deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) //nolint:gosec + err = deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) suite.Error(err, "expected error when deleting credentials in use") } @@ -81,7 +68,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnInvalidAuthType() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthType("invalid"), + AuthType: params.GithubAuthType("invalid"), PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -100,7 +87,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsWhenAuthTypeParamsAreIncorrect Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, App: params.GithubApp{ AppID: 123, InstallationID: 456, @@ -120,7 +107,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsWhenAuthTypeParamsAreMissing() Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypeApp, + AuthType: params.GithubAuthTypeApp, } _, err := createGithubCredentials(suite.cli, suite.authToken, createCredsParams) suite.Error(err, "expected error when creating credentials with 
missing auth type params") @@ -133,7 +120,7 @@ func (suite *GarmSuite) TestGithubCredentialsUpdateFailsWhenBothPATAndAppAreSupp creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec + suite.DeleteGithubCredential(int64(creds.ID)) }) privateKeyBytes, err := getTestFileContents("certs/srv-key.pem") @@ -148,7 +135,7 @@ func (suite *GarmSuite) TestGithubCredentialsUpdateFailsWhenBothPATAndAppAreSupp PrivateKeyBytes: privateKeyBytes, }, } - _, err = updateGithubCredentials(suite.cli, suite.authToken, int64(creds.ID), updateCredsParams) //nolint:gosec + _, err = updateGithubCredentials(suite.cli, suite.authToken, int64(creds.ID), updateCredsParams) suite.Error(err, "expected error when updating credentials with both PAT and App") expectAPIStatusCode(err, 400) } @@ -160,7 +147,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailWhenAppKeyIsInvalid() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypeApp, + AuthType: params.GithubAuthTypeApp, App: params.GithubApp{ AppID: 123, InstallationID: 456, @@ -179,7 +166,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailWhenEndpointDoesntExist() { Name: dummyCredentialsName, Endpoint: "iDontExist.example.com", Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -195,14 +182,14 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnDuplicateName() { creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec + suite.DeleteGithubCredential(int64(creds.ID)) }) createCredsParams := params.CreateGithubCredentialsParams{ Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -212,12 +199,12 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnDuplicateName() { expectAPIStatusCode(err, 409) } -func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*params.ForgeCredentials, error) { +func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*params.GithubCredentials, error) { createCredsParams := params.CreateGithubCredentialsParams{ Name: name, Endpoint: endpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -225,7 +212,7 @@ func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*para return suite.CreateGithubCredentials(createCredsParams) } -func (suite *GarmSuite) CreateGithubCredentials(credentialsParams params.CreateGithubCredentialsParams) (*params.ForgeCredentials, error) { +func (suite *GarmSuite) CreateGithubCredentials(credentialsParams params.CreateGithubCredentialsParams) (*params.GithubCredentials, error) { t := suite.T() t.Log("Create GitHub credentials") credentials, err := createGithubCredentials(suite.cli, suite.authToken, credentialsParams) diff --git a/test/integration/endpoints.go b/test/integration/endpoints.go index 720f43d2..9e47d854 100644 --- a/test/integration/endpoints.go +++ b/test/integration/endpoints.go @@ -1,17 +1,3 @@ -// Copyright 2025 
Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package integration import ( @@ -22,7 +8,7 @@ import ( "github.com/cloudbase/garm/params" ) -func checkEndpointParamsAreEqual(a, b params.ForgeEndpoint) error { +func checkEndpointParamsAreEqual(a, b params.GithubEndpoint) error { if a.Name != b.Name { return fmt.Errorf("endpoint name mismatch") } diff --git a/test/integration/endpoints_test.go b/test/integration/endpoints_test.go index fe0dd160..c6295349 100644 --- a/test/integration/endpoints_test.go +++ b/test/integration/endpoints_test.go @@ -1,20 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package integration import ( @@ -114,7 +100,7 @@ func (suite *GarmSuite) TestGithubEndpointDeletionFailsWhenCredentialsExist() { err = deleteGithubEndpoint(suite.cli, suite.authToken, endpoint.Name) suite.Error(err, "expected error when deleting endpoint with credentials") - err = suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec + err = suite.DeleteGithubCredential(int64(creds.ID)) suite.NoError(err, "error deleting credentials") err = suite.DeleteGithubEndpoint(endpoint.Name) suite.NoError(err, "error deleting endpoint") @@ -177,7 +163,7 @@ func (suite *GarmSuite) MustDefaultGithubEndpoint() { suite.Equal(ep.Name, "github.com", "default GitHub endpoint name mismatch") } -func (suite *GarmSuite) GetGithubEndpoint(name string) *params.ForgeEndpoint { +func (suite *GarmSuite) GetGithubEndpoint(name string) *params.GithubEndpoint { t := suite.T() t.Log("Get GitHub endpoint") endpoint, err := getGithubEndpoint(suite.cli, suite.authToken, name) @@ -186,7 +172,7 @@ func (suite *GarmSuite) GetGithubEndpoint(name string) *params.ForgeEndpoint { return endpoint } -func (suite *GarmSuite) CreateGithubEndpoint(params params.CreateGithubEndpointParams) (*params.ForgeEndpoint, error) { +func (suite *GarmSuite) CreateGithubEndpoint(params params.CreateGithubEndpointParams) (*params.GithubEndpoint, error) { t := suite.T() t.Log("Create GitHub endpoint") endpoint, err := createGithubEndpoint(suite.cli, suite.authToken, params) @@ -204,7 +190,7 @@ func (suite *GarmSuite) DeleteGithubEndpoint(name string) error { return nil } -func (suite *GarmSuite) ListGithubEndpoints() params.ForgeEndpoints { +func (suite *GarmSuite) ListGithubEndpoints() params.GithubEndpoints { t := suite.T() t.Log("List GitHub endpoints") endpoints, err := listGithubEndpoints(suite.cli, 
suite.authToken) @@ -213,7 +199,7 @@ func (suite *GarmSuite) ListGithubEndpoints() params.ForgeEndpoints { return endpoints } -func (suite *GarmSuite) createDummyEndpoint(name string) (*params.ForgeEndpoint, error) { +func (suite *GarmSuite) createDummyEndpoint(name string) (*params.GithubEndpoint, error) { endpointParams := params.CreateGithubEndpointParams{ Name: name, Description: "Dummy endpoint", diff --git a/test/integration/external_provider_test.go b/test/integration/external_provider_test.go index 2c85eb35..cc6b18aa 100644 --- a/test/integration/external_provider_test.go +++ b/test/integration/external_provider_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( @@ -86,7 +73,7 @@ func (suite *GarmSuite) WaitPoolInstances(poolID string, status commonParams.Ins "Pool instance with pool_id %s reached status %v and runner_status %v, desired_instance_count %d, pool_instance_count %d", poolID, status, runnerStatus, instancesCount, len(poolInstances)) - if pool.MinIdleRunnersAsInt() == instancesCount { + if int(pool.MinIdleRunners) == instancesCount { return nil } time.Sleep(5 * time.Second) diff --git a/test/integration/gh_cleanup/main.go b/test/integration/gh_cleanup/main.go index 86d39ea7..5d70705a 100644 --- a/test/integration/gh_cleanup/main.go +++ b/test/integration/gh_cleanup/main.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package main import ( @@ -155,7 +141,7 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL := hook.Config.GetURL() + hookURL := hook.GetURL() if hookURL == url { return hook, nil } @@ -172,7 +158,7 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL := hook.Config.GetURL() + hookURL := hook.GetURL() if hookURL == url { return hook, nil } diff --git a/test/integration/jobs_test.go b/test/integration/jobs_test.go index 4b2d9d5d..002ebeda 100644 --- a/test/integration/jobs_test.go +++ b/test/integration/jobs_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( diff --git a/test/integration/list_info_test.go b/test/integration/list_info_test.go index ddb3ff86..1eef816e 100644 --- a/test/integration/list_info_test.go +++ b/test/integration/list_info_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index d587f4a5..46264d87 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
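The WaitPoolInstances helper shown earlier polls pool state every five seconds until the instance count converges. A minimal, self-contained sketch of that poll-until-condition pattern follows; the check callback is a hypothetical stand-in for querying pool instances, not GARM's actual API:

package integration

import (
	"context"
	"fmt"
	"time"
)

// waitFor polls check every interval until it reports done or ctx expires.
// It mirrors the WaitPoolInstances loop above.
func waitFor(ctx context.Context, interval time.Duration, check func() (bool, error)) error {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		done, err := check()
		if err != nil {
			return fmt.Errorf("checking condition: %w", err)
		}
		if done {
			return nil
		}
		select {
		case <-ctx.Done():
			return fmt.Errorf("timed out waiting for condition: %w", ctx.Err())
		case <-ticker.C:
		}
	}
}

A caller would typically derive ctx from context.WithTimeout, matching the timeout parameter the suite's wait helpers accept.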
package integration import ( @@ -70,7 +57,7 @@ func (suite *GarmSuite) CreateOrg(orgName, credentialsName, orgWebhookSecret str WebhookSecret: orgWebhookSecret, } org, err := createOrg(suite.cli, suite.authToken, orgParams) - suite.NoError(err, "error creating organization") + suite.Require().NoError(err, "error creating organization") return org } @@ -81,7 +68,7 @@ func (suite *GarmSuite) UpdateOrg(id, credentialsName string) *params.Organizati CredentialsName: credentialsName, } org, err := updateOrg(suite.cli, suite.authToken, id, updateParams) - suite.NoError(err, "error updating organization") + suite.Require().NoError(err, "error updating organization") return org } @@ -92,15 +79,15 @@ func (suite *GarmSuite) InstallOrgWebhook(id string) *params.HookInfo { WebhookEndpointType: params.WebhookEndpointDirect, } _, err := installOrgWebhook(suite.cli, suite.authToken, id, webhookParams) - suite.NoError(err, "error installing organization webhook") + suite.Require().NoError(err, "error installing organization webhook") webhookInfo, err := getOrgWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error getting organization webhook") + suite.Require().NoError(err, "error getting organization webhook") return webhookInfo } func (suite *GarmSuite) ValidateOrgWebhookInstalled(ghToken, url, orgName string) { hook, err := getGhOrgWebhook(url, ghToken, orgName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.NotNil(hook, "github webhook with url %s, for org %s was not properly installed", url, orgName) } @@ -125,12 +112,12 @@ func (suite *GarmSuite) UninstallOrgWebhook(id string) { t := suite.T() t.Logf("Uninstall org webhook with org_id %s", id) err := uninstallOrgWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error uninstalling organization webhook") + suite.Require().NoError(err, "error uninstalling organization webhook") } func (suite *GarmSuite) ValidateOrgWebhookUninstalled(ghToken, url, orgName string) { hook, err := getGhOrgWebhook(url, ghToken, orgName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.Nil(hook, "github webhook with url %s, for org %s was not properly uninstalled", url, orgName) } @@ -138,7 +125,7 @@ func (suite *GarmSuite) CreateOrgPool(orgID string, poolParams params.CreatePool t := suite.T() t.Logf("Create org pool with org_id %s", orgID) pool, err := createOrgPool(suite.cli, suite.authToken, orgID, poolParams) - suite.NoError(err, "error creating organization pool") + suite.Require().NoError(err, "error creating organization pool") return pool } @@ -146,7 +133,7 @@ func (suite *GarmSuite) GetOrgPool(orgID, orgPoolID string) *params.Pool { t := suite.T() t.Logf("Get org pool with org_id %s and pool_id %s", orgID, orgPoolID) pool, err := getOrgPool(suite.cli, suite.authToken, orgID, orgPoolID) - suite.NoError(err, "error getting organization pool") + suite.Require().NoError(err, "error getting organization pool") return pool } @@ -154,7 +141,7 @@ func (suite *GarmSuite) DeleteOrgPool(orgID, orgPoolID string) { t := suite.T() t.Logf("Delete org pool with org_id %s and pool_id %s", orgID, orgPoolID) err := deleteOrgPool(suite.cli, suite.authToken, orgID, orgPoolID) - suite.NoError(err, "error deleting organization pool") + suite.Require().NoError(err, "error deleting organization pool") } func (suite *GarmSuite) UpdateOrgPool(orgID, orgPoolID string, maxRunners, minIdleRunners uint) *params.Pool { @@ 
-165,14 +152,14 @@ func (suite *GarmSuite) UpdateOrgPool(orgID, orgPoolID string, maxRunners, minId MaxRunners: &maxRunners, } pool, err := updateOrgPool(suite.cli, suite.authToken, orgID, orgPoolID, poolParams) - suite.NoError(err, "error updating organization pool") + suite.Require().NoError(err, "error updating organization pool") return pool } func (suite *GarmSuite) WaitOrgRunningIdleInstances(orgID string, timeout time.Duration) { t := suite.T() orgPools, err := listOrgPools(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error listing organization pools") + suite.Require().NoError(err, "error listing organization pools") for _, pool := range orgPools { err := suite.WaitPoolInstances(pool.ID, commonParams.InstanceRunning, params.RunnerIdle, timeout) if err != nil { @@ -187,19 +174,19 @@ func (suite *GarmSuite) dumpOrgInstancesDetails(orgID string) { // print org details t.Logf("Dumping org details with org_id %s", orgID) org, err := getOrg(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error getting organization") + suite.Require().NoError(err, "error getting organization") err = printJSONResponse(org) - suite.NoError(err, "error printing organization") + suite.Require().NoError(err, "error printing organization") // print org instances details t.Logf("Dumping org instances details for org %s", orgID) instances, err := listOrgInstances(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error listing organization instances") + suite.Require().NoError(err, "error listing organization instances") for _, instance := range instances { instance, err := getInstance(suite.cli, suite.authToken, instance.Name) - suite.NoError(err, "error getting instance") + suite.Require().NoError(err, "error getting instance") t.Logf("Instance info for instance %s", instance.Name) err = printJSONResponse(instance) - suite.NoError(err, "error printing instance") + suite.Require().NoError(err, "error printing instance") } } diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 1b0558f9..2936cef8 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License.
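The change from suite.NoError to suite.Require().NoError throughout these helpers is behavioral, not cosmetic: testify's assert-style calls record the failure and keep executing, while Require() calls t.FailNow and aborts the test, which prevents follow-up dereferences of values that were never initialized. A small illustration; the fetchRepo helper is hypothetical:

package integration

import (
	"testing"

	"github.com/stretchr/testify/suite"
)

type demoSuite struct {
	suite.Suite
}

type demoRepo struct{ Name string }

// fetchRepo is a hypothetical stand-in for helpers like updateRepo above.
func fetchRepo() (*demoRepo, error) {
	return &demoRepo{Name: "demo"}, nil
}

func (s *demoSuite) TestRequireAborts() {
	r, err := fetchRepo()

	// s.NoError(err) would only record a failure and continue, so the
	// r.Name access below could dereference a nil pointer. Require()
	// calls t.FailNow instead: on error, nothing past this line runs.
	s.Require().NoError(err, "error fetching repository")
	s.NotEmpty(r.Name, "repository name should be set")
}

func TestDemoSuite(t *testing.T) {
	suite.Run(t, new(demoSuite))
}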
package integration import ( @@ -35,7 +22,7 @@ func (suite *GarmSuite) EnsureTestCredentials(name string, oauthToken string, en Name: name, Endpoint: endpointName, Description: "GARM test credentials", - AuthType: params.ForgeAuthTypePAT, + AuthType: params.GithubAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: oauthToken, }, @@ -54,7 +41,7 @@ func (suite *GarmSuite) TestRepositories() { CredentialsName: fmt.Sprintf("%s-clone", suite.credentialsName), } repo, err := updateRepo(suite.cli, suite.authToken, suite.repo.ID, updateParams) - suite.NoError(err, "error updating repository") + suite.Require().NoError(err, "error updating repository") suite.Equal(fmt.Sprintf("%s-clone", suite.credentialsName), repo.CredentialsName, "credentials name mismatch") suite.repo = repo @@ -101,16 +88,16 @@ func (suite *GarmSuite) InstallRepoWebhook(id string) *params.HookInfo { WebhookEndpointType: params.WebhookEndpointDirect, } _, err := installRepoWebhook(suite.cli, suite.authToken, id, webhookParams) - suite.NoError(err, "error installing repository webhook") + suite.Require().NoError(err, "error installing repository webhook") webhookInfo, err := getRepoWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error getting repository webhook") + suite.Require().NoError(err, "error getting repository webhook") return webhookInfo } func (suite *GarmSuite) ValidateRepoWebhookInstalled(ghToken, url, orgName, repoName string) { hook, err := getGhRepoWebhook(url, ghToken, orgName, repoName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.NotNil(hook, "github webhook with url %s, for repo %s/%s was not properly installed", url, orgName, repoName) } @@ -141,12 +128,12 @@ func (suite *GarmSuite) UninstallRepoWebhook(id string) { t := suite.T() t.Logf("Uninstall repo webhook with repo_id %s", id) err := uninstallRepoWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error uninstalling repository webhook") + suite.Require().NoError(err, "error uninstalling repository webhook") } func (suite *GarmSuite) ValidateRepoWebhookUninstalled(ghToken, url, orgName, repoName string) { hook, err := getGhRepoWebhook(url, ghToken, orgName, repoName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.Nil(hook, "github webhook with url %s, for repo %s/%s was not properly uninstalled", url, orgName, repoName) } @@ -154,7 +141,7 @@ func (suite *GarmSuite) CreateRepoPool(repoID string, poolParams params.CreatePo t := suite.T() t.Logf("Create repo pool with repo_id %s and pool_params %+v", repoID, poolParams) pool, err := createRepoPool(suite.cli, suite.authToken, repoID, poolParams) - suite.NoError(err, "error creating repository pool") + suite.Require().NoError(err, "error creating repository pool") return pool } @@ -162,7 +149,7 @@ func (suite *GarmSuite) GetRepoPool(repoID, repoPoolID string) *params.Pool { t := suite.T() t.Logf("Get repo pool repo_id %s and pool_id %s", repoID, repoPoolID) pool, err := getRepoPool(suite.cli, suite.authToken, repoID, repoPoolID) - suite.NoError(err, "error getting repository pool") + suite.Require().NoError(err, "error getting repository pool") return pool } @@ -170,7 +157,7 @@ func (suite *GarmSuite) DeleteRepoPool(repoID, repoPoolID string) { t := suite.T() t.Logf("Delete repo pool with repo_id %s and pool_id %s", repoID, repoPoolID) err := deleteRepoPool(suite.cli, suite.authToken, repoID, repoPoolID) - suite.NoError(err, "error 
deleting repository pool") + suite.Require().NoError(err, "error deleting repository pool") } func (suite *GarmSuite) UpdateRepoPool(repoID, repoPoolID string, maxRunners, minIdleRunners uint) *params.Pool { @@ -181,14 +168,14 @@ func (suite *GarmSuite) UpdateRepoPool(repoID, repoPoolID string, maxRunners, mi MaxRunners: &maxRunners, } pool, err := updateRepoPool(suite.cli, suite.authToken, repoID, repoPoolID, poolParams) - suite.NoError(err, "error updating repository pool") + suite.Require().NoError(err, "error updating repository pool") return pool } func (suite *GarmSuite) WaitRepoRunningIdleInstances(repoID string, timeout time.Duration) { t := suite.T() repoPools, err := listRepoPools(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error listing repo pools") + suite.Require().NoError(err, "error listing repo pools") for _, pool := range repoPools { err := suite.WaitPoolInstances(pool.ID, commonParams.InstanceRunning, params.RunnerIdle, timeout) if err != nil { @@ -203,19 +190,19 @@ func (suite *GarmSuite) dumpRepoInstancesDetails(repoID string) { // print repo details t.Logf("Dumping repo details for repo %s", repoID) repo, err := getRepo(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error getting repo") + suite.Require().NoError(err, "error getting repo") err = printJSONResponse(repo) - suite.NoError(err, "error printing repo") + suite.Require().NoError(err, "error printing repo") // print repo instances details t.Logf("Dumping repo instances details for repo %s", repoID) instances, err := listRepoInstances(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error listing repo instances") + suite.Require().NoError(err, "error listing repo instances") for _, instance := range instances { instance, err := getInstance(suite.cli, suite.authToken, instance.Name) - suite.NoError(err, "error getting instance") + suite.Require().NoError(err, "error getting instance") t.Logf("Instance info for instance %s", instance.Name) err = printJSONResponse(instance) - suite.NoError(err, "error printing instance") + suite.Require().NoError(err, "error printing instance") } } diff --git a/test/integration/suite_test.go b/test/integration/suite_test.go index ca6b3030..c2f4bd5f 100644 --- a/test/integration/suite_test.go +++ b/test/integration/suite_test.go @@ -1,19 +1,6 @@ //go:build integration // +build integration -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( diff --git a/test/integration/utils.go b/test/integration/utils.go index 1fa35b5e..24e97b7f 100644 --- a/test/integration/utils.go +++ b/test/integration/utils.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package integration import ( diff --git a/testdata/config.toml b/testdata/config.toml index 337c0dd6..ee85ee33 100644 --- a/testdata/config.toml +++ b/testdata/config.toml @@ -82,8 +82,6 @@ time_to_live = "8760h" certificate = "" # The path on disk to the corresponding private key for the certificate. key = "" - [apiserver.webui] - enable = true [database] # Turn on/off debugging for database queries. diff --git a/util/appdefaults/appdefaults.go b/util/appdefaults/appdefaults.go index cc53f794..479db08f 100644 --- a/util/appdefaults/appdefaults.go +++ b/util/appdefaults/appdefaults.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package appdefaults import "time" diff --git a/util/github/client.go b/util/github/client.go deleted file mode 100644 index b4ca32e5..00000000 --- a/util/github/client.go +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package github - -import ( - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "log/slog" - "net/http" - "net/url" - "strings" - - "github.com/google/go-github/v72/github" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/cache" - "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" -) - -type githubClient struct { - *github.ActionsService - org *github.OrganizationsService - repo *github.RepositoriesService - enterprise *github.EnterpriseService - rateLimit *github.RateLimitService - - entity params.ForgeEntity - cli *github.Client -} - -func (g *githubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) (ret []*github.Hook, response *github.Response, err error) { - metrics.GithubOperationCount.WithLabelValues( - "ListHooks", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListHooks", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, response, err = g.repo.ListHooks(ctx, g.entity.Owner, g.entity.Name, opts) - case params.ForgeEntityTypeOrganization: - ret, response, err = g.org.ListHooks(ctx, g.entity.Owner, opts) - default: - return nil, nil, fmt.Errorf("invalid entity type: %s", g.entity.EntityType) - } - return ret, response, err -} - -func (g *githubClient) GetEntityHook(ctx context.Context, id int64) (ret *github.Hook, err error) { - metrics.GithubOperationCount.WithLabelValues( - "GetHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "GetHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, _, err = g.repo.GetHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.ForgeEntityTypeOrganization: - ret, _, err = g.org.GetHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) createGithubEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { - metrics.GithubOperationCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, _, err = g.repo.CreateHook(ctx, g.entity.Owner, g.entity.Name, hook) - case params.ForgeEntityTypeOrganization: - ret, _, err = g.org.CreateHook(ctx, g.entity.Owner, hook) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { - switch g.entity.Credentials.ForgeType { - case params.GithubEndpointType: - return g.createGithubEntityHook(ctx, hook) - case params.GiteaEndpointType: - return g.createGiteaEntityHook(ctx, hook) - default: - return nil, errors.New("invalid entity type") - } -} - -func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { - 
metrics.GithubOperationCount.WithLabelValues( - "DeleteHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "DeleteHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, err = g.repo.DeleteHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.ForgeEntityTypeOrganization: - ret, err = g.org.DeleteHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { - metrics.GithubOperationCount.WithLabelValues( - "PingHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "PingHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, err = g.repo.PingHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.ForgeEntityTypeOrganization: - ret, err = g.org.PingHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { - var ret *github.Runners - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "ListEntityRunners", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListEntityRunners", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, response, err = g.ListRunners(ctx, g.entity.Owner, g.entity.Name, opts) - case params.ForgeEntityTypeOrganization: - ret, response, err = g.ListOrganizationRunners(ctx, g.entity.Owner, opts) - case params.ForgeEntityTypeEnterprise: - ret, response, err = g.enterprise.ListRunners(ctx, g.entity.Owner, opts) - default: - return nil, nil, errors.New("invalid entity type") - } - - return ret, response, err -} - -func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { - var ret []*github.RunnerApplicationDownload - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "ListEntityRunnerApplicationDownloads", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListEntityRunnerApplicationDownloads", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, response, err = g.ListRunnerApplicationDownloads(ctx, g.entity.Owner, g.entity.Name) - case params.ForgeEntityTypeOrganization: - ret, response, err = g.ListOrganizationRunnerApplicationDownloads(ctx, g.entity.Owner) - case params.ForgeEntityTypeEnterprise: - ret, response, err = g.enterprise.ListRunnerApplicationDownloads(ctx, g.entity.Owner) - default: - return nil, nil, errors.New("invalid entity type") - } - - 
return ret, response, err -} - -func parseError(response *github.Response, err error) error { - var statusCode int - if response != nil { - statusCode = response.StatusCode - } - - switch statusCode { - case http.StatusNotFound: - return runnerErrors.ErrNotFound - case http.StatusUnauthorized: - return runnerErrors.ErrUnauthorized - case http.StatusUnprocessableEntity: - return runnerErrors.ErrBadRequest - default: - if statusCode >= 100 && statusCode < 300 { - return nil - } - if err != nil { - errResp := &github.ErrorResponse{} - if errors.As(err, &errResp) && errResp.Response != nil { - switch errResp.Response.StatusCode { - case http.StatusNotFound: - return runnerErrors.ErrNotFound - case http.StatusUnauthorized: - return runnerErrors.ErrUnauthorized - case http.StatusUnprocessableEntity: - return runnerErrors.ErrBadRequest - default: - // ugly hack. Gitea returns 500 if we try to remove a runner that does not exist. - if strings.Contains(err.Error(), "does not exist") { - return runnerErrors.ErrNotFound - } - return err - } - } - return err - } - return errors.New("unknown error") - } -} - -func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "RemoveEntityRunner", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "RemoveEntityRunner", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - response, err = g.RemoveRunner(ctx, g.entity.Owner, g.entity.Name, runnerID) - case params.ForgeEntityTypeOrganization: - response, err = g.RemoveOrganizationRunner(ctx, g.entity.Owner, runnerID) - case params.ForgeEntityTypeEnterprise: - response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) - default: - return errors.New("invalid entity type") - } - - if err := parseError(response, err); err != nil { - return fmt.Errorf("error removing runner %d: %w", runnerID, err) - } - - return nil -} - -func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { - var ret *github.RegistrationToken - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "CreateEntityRegistrationToken", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "CreateEntityRegistrationToken", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, response, err = g.CreateRegistrationToken(ctx, g.entity.Owner, g.entity.Name) - case params.ForgeEntityTypeOrganization: - ret, response, err = g.CreateOrganizationRegistrationToken(ctx, g.entity.Owner) - case params.ForgeEntityTypeEnterprise: - ret, response, err = g.enterprise.CreateRegistrationToken(ctx, g.entity.Owner) - default: - return nil, nil, errors.New("invalid entity type") - } - - return ret, response, err -} - -func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.ForgeEntity, rgName string) (int64, error) { - opts := github.ListOrgRunnerGroupOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - - for { - 
metrics.GithubOperationCount.WithLabelValues( - "ListOrganizationRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - runnerGroups, ghResp, err := g.ListOrganizationRunnerGroups(ctx, entity.Owner, &opts) - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListOrganizationRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, fmt.Errorf("error fetching runners: %w", runnerErrors.ErrUnauthorized) - } - return 0, fmt.Errorf("error fetching runners: %w", err) - } - for _, runnerGroup := range runnerGroups.RunnerGroups { - if runnerGroup.Name != nil && *runnerGroup.Name == rgName { - return *runnerGroup.ID, nil - } - } - if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - return 0, runnerErrors.NewNotFoundError("runner group %s not found", rgName) -} - -func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.ForgeEntity, rgName string) (int64, error) { - opts := github.ListEnterpriseRunnerGroupOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - - for { - metrics.GithubOperationCount.WithLabelValues( - "ListRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - runnerGroups, ghResp, err := g.enterprise.ListRunnerGroups(ctx, entity.Owner, &opts) - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, fmt.Errorf("error fetching runners: %w", runnerErrors.ErrUnauthorized) - } - return 0, fmt.Errorf("error fetching runners: %w", err) - } - for _, runnerGroup := range runnerGroups.RunnerGroups { - if runnerGroup.Name != nil && *runnerGroup.Name == rgName { - return *runnerGroup.ID, nil - } - } - if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - return 0, runnerErrors.NewNotFoundError("runner group not found") -} - -func (g *githubClient) GetEntityRunnerGroupIDByName(ctx context.Context, runnerGroupName string) (int64, error) { - var rgID int64 = 1 - - if g.entity.EntityType == params.ForgeEntityTypeRepository { - // This is a repository. Runner groups are supported at the org and - // enterprise levels. Return the default runner group ID early. - return rgID, nil - } - - var ok bool - var err error - // attempt to get the runner group ID from cache. Cache will invalidate after 1 hour. - if runnerGroupName != "" && !strings.EqualFold(runnerGroupName, "default") { - rgID, ok = cache.GetEntityRunnerGroup(g.entity.ID, runnerGroupName) - if !ok || rgID == 0 { - switch g.entity.EntityType { - case params.ForgeEntityTypeOrganization: - rgID, err = g.getOrganizationRunnerGroupIDByName(ctx, g.entity, runnerGroupName) - case params.ForgeEntityTypeEnterprise: - rgID, err = g.getEnterpriseRunnerGroupIDByName(ctx, g.entity, runnerGroupName) - } - - if err != nil { - return 0, fmt.Errorf("getting runner group ID: %w", err) - } - } - // set cache. Avoid fetching the same runner group more than once an hour.
- cache.SetEntityRunnerGroup(g.entity.ID, runnerGroupName, rgID) - } - return rgID, nil -} - -func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) { - rgID, err := g.GetEntityRunnerGroupIDByName(ctx, pool.GitHubRunnerGroup) - if err != nil { - return nil, nil, fmt.Errorf("failed to get runner group: %w", err) - } - slog.DebugContext(ctx, "using runner group", "group_name", pool.GitHubRunnerGroup, "runner_group_id", rgID) - req := github.GenerateJITConfigRequest{ - Name: instance, - RunnerGroupID: rgID, - Labels: labels, - // nolint:golangci-lint,godox - // TODO(gabriel-samfira): Should we make this configurable? - WorkFolder: github.Ptr("_work"), - } - - metrics.GithubOperationCount.WithLabelValues( - "GetEntityJITConfig", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - - var ret *github.JITRunnerConfig - var response *github.Response - - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, response, err = g.GenerateRepoJITConfig(ctx, g.entity.Owner, g.entity.Name, &req) - case params.ForgeEntityTypeOrganization: - ret, response, err = g.GenerateOrgJITConfig(ctx, g.entity.Owner, &req) - case params.ForgeEntityTypeEnterprise: - ret, response, err = g.enterprise.GenerateEnterpriseJITConfig(ctx, g.entity.Owner, &req) - } - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "GetEntityJITConfig", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - if response != nil && response.StatusCode == http.StatusUnauthorized { - return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) - } - return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) - } - - defer func(run *github.Runner) { - if err != nil && run != nil { - innerErr := g.RemoveEntityRunner(ctx, run.GetID()) - slog.With(slog.Any("error", innerErr)).ErrorContext( - ctx, "failed to remove runner", - "runner_id", run.GetID(), string(g.entity.EntityType), g.entity.String()) - } - }(ret.Runner) - - decoded, err := base64.StdEncoding.DecodeString(*ret.EncodedJITConfig) - if err != nil { - return nil, nil, fmt.Errorf("failed to decode JIT config: %w", err) - } - - var jitConfig map[string]string - if err := json.Unmarshal(decoded, &jitConfig); err != nil { - return nil, nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) - } - - return jitConfig, ret.Runner, nil -} - -func (g *githubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { - limits, resp, err := g.rateLimit.Get(ctx) - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "GetRateLimit", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - if err := parseError(resp, err); err != nil { - return nil, fmt.Errorf("getting rate limit: %w", err) - } - return limits, nil -} - -func (g *githubClient) GetEntity() params.ForgeEntity { - return g.entity -} - -func (g *githubClient) GithubBaseURL() *url.URL { - return g.cli.BaseURL -} - -func NewRateLimitClient(ctx context.Context, credentials params.ForgeCredentials) (common.RateLimitClient, error) { - httpClient, err := credentials.GetHTTPClient(ctx) - if err != nil { - return nil, fmt.Errorf("error fetching http client: %w", err) - } - - slog.DebugContext( - ctx, "creating rate limit client", - "base_url", credentials.APIBaseURL, - "upload_url", credentials.UploadBaseURL) - - ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs( - 
credentials.APIBaseURL, credentials.UploadBaseURL) - if err != nil { - return nil, fmt.Errorf("error fetching github client: %w", err) - } - cli := &githubClient{ - rateLimit: ghClient.RateLimit, - cli: ghClient, - } - - return cli, nil -} - -func withGiteaURLs(client *github.Client, apiBaseURL string) (*github.Client, error) { - if client == nil { - return nil, errors.New("client is nil") - } - - if apiBaseURL == "" { - return nil, errors.New("invalid gitea URLs") - } - - parsedBaseURL, err := url.ParseRequestURI(apiBaseURL) - if err != nil { - return nil, fmt.Errorf("error parsing gitea base URL: %w", err) - } - - if !strings.HasSuffix(parsedBaseURL.Path, "/") { - parsedBaseURL.Path += "/" - } - - if !strings.HasSuffix(parsedBaseURL.Path, "/api/v1/") { - parsedBaseURL.Path += "api/v1/" - } - - client.BaseURL = parsedBaseURL - client.UploadURL = parsedBaseURL - - return client, nil -} - -func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient, error) { - // func GithubClient(ctx context.Context, entity params.ForgeEntity) (common.GithubClient, error) { - httpClient, err := entity.Credentials.GetHTTPClient(ctx) - if err != nil { - return nil, fmt.Errorf("error fetching http client: %w", err) - } - - slog.DebugContext( - ctx, "creating client for entity", - "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL, - "upload_url", entity.Credentials.UploadBaseURL) - - ghClient := github.NewClient(httpClient) - switch entity.Credentials.ForgeType { - case params.GithubEndpointType: - ghClient, err = ghClient.WithEnterpriseURLs(entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL) - case params.GiteaEndpointType: - ghClient, err = withGiteaURLs(ghClient, entity.Credentials.APIBaseURL) - } - - if err != nil { - return nil, fmt.Errorf("error fetching github client: %w", err) - } - - cli := &githubClient{ - ActionsService: ghClient.Actions, - org: ghClient.Organizations, - repo: ghClient.Repositories, - enterprise: ghClient.Enterprise, - rateLimit: ghClient.RateLimit, - cli: ghClient, - entity: entity, - } - - return cli, nil -} diff --git a/util/github/gitea.go b/util/github/gitea.go deleted file mode 100644 index 5d35190b..00000000 --- a/util/github/gitea.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
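withGiteaURLs above rewrites the configured Gitea base URL so the go-github client ends up targeting Gitea's /api/v1/ prefix. A standalone sketch of the same path normalization, which makes the trailing-slash handling easy to verify:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// normalizeGiteaURL mirrors the path handling in withGiteaURLs: ensure a
// trailing slash, then append api/v1/ unless the path already ends with it.
func normalizeGiteaURL(apiBaseURL string) (string, error) {
	parsed, err := url.ParseRequestURI(apiBaseURL)
	if err != nil {
		return "", fmt.Errorf("error parsing gitea base URL: %w", err)
	}
	if !strings.HasSuffix(parsed.Path, "/") {
		parsed.Path += "/"
	}
	if !strings.HasSuffix(parsed.Path, "/api/v1/") {
		parsed.Path += "api/v1/"
	}
	return parsed.String(), nil
}

func main() {
	// Both inputs normalize to https://gitea.example.com/api/v1/
	for _, in := range []string{"https://gitea.example.com", "https://gitea.example.com/api/v1"} {
		out, err := normalizeGiteaURL(in)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> %s\n", in, out)
	}
}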
- -package github - -import ( - "context" - "errors" - "fmt" - "net/http" - - "github.com/google/go-github/v72/github" - - "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" -) - -type createGiteaHookOptions struct { - Type string `json:"type"` - Config map[string]string `json:"config"` - Events []string `json:"events"` - BranchFilter string `json:"branch_filter"` - Active bool `json:"active"` - AuthorizationHeader string `json:"authorization_header"` -} - -func (g *githubClient) createGiteaRepoHook(ctx context.Context, owner, name string, hook *github.Hook) (ret *github.Hook, err error) { - u := fmt.Sprintf("repos/%v/%v/hooks", owner, name) - createOpts := &createGiteaHookOptions{ - Type: "gitea", - Events: hook.Events, - Active: hook.GetActive(), - BranchFilter: "*", - Config: map[string]string{ - "content_type": hook.GetConfig().GetContentType(), - "url": hook.GetConfig().GetURL(), - "http_method": "post", - "secret": hook.GetConfig().GetSecret(), - }, - } - - req, err := g.cli.NewRequest(http.MethodPost, u, createOpts) - if err != nil { - return nil, fmt.Errorf("failed to construct request: %w", err) - } - - hook = new(github.Hook) - _, err = g.cli.Do(ctx, req, hook) - if err != nil { - return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - return hook, nil -} - -func (g *githubClient) createGiteaOrgHook(ctx context.Context, owner string, hook *github.Hook) (ret *github.Hook, err error) { - u := fmt.Sprintf("orgs/%v/hooks", owner) - createOpts := &createGiteaHookOptions{ - Type: "gitea", - Events: hook.Events, - Active: hook.GetActive(), - BranchFilter: "*", - Config: map[string]string{ - "content_type": hook.GetConfig().GetContentType(), - "url": hook.GetConfig().GetURL(), - "http_method": "post", - "secret": hook.GetConfig().GetSecret(), - }, - } - - req, err := g.cli.NewRequest(http.MethodPost, u, createOpts) - if err != nil { - return nil, fmt.Errorf("failed to construct request: %w", err) - } - - hook = new(github.Hook) - _, err = g.cli.Do(ctx, req, hook) - if err != nil { - return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - return hook, nil -} - -func (g *githubClient) createGiteaEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { - metrics.GithubOperationCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.ForgeEntityTypeRepository: - ret, err = g.createGiteaRepoHook(ctx, g.entity.Owner, g.entity.Name, hook) - case params.ForgeEntityTypeOrganization: - ret, err = g.createGiteaOrgHook(ctx, g.entity.Owner, hook) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} diff --git a/util/github/scalesets/client.go b/util/github/scalesets/client.go deleted file mode 100644 index 6b4b1bab..00000000 --- a/util/github/scalesets/client.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. 
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package scalesets - -import ( - "fmt" - "io" - "net/http" - "sync" - - "github.com/google/go-github/v72/github" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" -) - -func NewClient(cli common.GithubClient) (*ScaleSetClient, error) { - return &ScaleSetClient{ - ghCli: cli, - httpClient: &http.Client{}, - }, nil -} - -type ScaleSetClient struct { - ghCli common.GithubClient - httpClient *http.Client - - // scale sets are apparently available through the same security - // context that a normal runner would use. We connect to the same - // API endpoint a runner would connect to, in order to fetch jobs. - // To do this, we use a runner registration token. - runnerRegistrationToken *github.RegistrationToken - // actionsServiceInfo holds the pipeline URL and the JWT token to - // access it. The pipeline URL is the base URL where we can access - // the scale set endpoints. - actionsServiceInfo *params.ActionsServiceAdminInfoResponse - - mux sync.Mutex -} - -func (s *ScaleSetClient) SetGithubClient(cli common.GithubClient) { - s.mux.Lock() - defer s.mux.Unlock() - s.ghCli = cli -} - -func (s *ScaleSetClient) GetGithubClient() (common.GithubClient, error) { - s.mux.Lock() - defer s.mux.Unlock() - if s.ghCli == nil { - return nil, fmt.Errorf("github client is not set in scaleset client") - } - return s.ghCli, nil -} - -func (s *ScaleSetClient) Do(req *http.Request) (*http.Response, error) { - if s.httpClient == nil { - return nil, fmt.Errorf("http client is not initialized") - } - - resp, err := s.httpClient.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to dispatch HTTP request: %w", err) - } - - if resp.StatusCode >= 200 && resp.StatusCode < 300 { - return resp, nil - } - - var body []byte - if resp != nil { - defer resp.Body.Close() - body, err = io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("failed to read body: %w", err) - } - } - - switch resp.StatusCode { - case 404: - return nil, runnerErrors.NewNotFoundError("resource %s not found: %q", req.URL.String(), string(body)) - case 400: - return nil, runnerErrors.NewBadRequestError("bad request while calling %s: %q", req.URL.String(), string(body)) - case 409: - return nil, runnerErrors.NewConflictError("conflict while calling %s: %q", req.URL.String(), string(body)) - case 401, 403: - return nil, runnerErrors.ErrUnauthorized - default: - return nil, fmt.Errorf("request to %s failed with status code %d: %q", req.URL.String(), resp.StatusCode, string(body)) - } -} diff --git a/util/github/scalesets/jobs.go b/util/github/scalesets/jobs.go deleted file mode 100644 index defc9506..00000000 --- a/util/github/scalesets/jobs.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License.
You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package scalesets - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "net/http" - - "github.com/cloudbase/garm/params" -) - -type acquireJobsResult struct { - Count int `json:"count"` - Value []int64 `json:"value"` -} - -func (s *ScaleSetClient) AcquireJobs(ctx context.Context, runnerScaleSetID int, messageQueueAccessToken string, requestIDs []int64) ([]int64, error) { - u := fmt.Sprintf("%s/%d/acquirejobs?api-version=6.0-preview", scaleSetEndpoint, runnerScaleSetID) - - body, err := json.Marshal(requestIDs) - if err != nil { - return nil, err - } - - req, err := s.newActionsRequest(ctx, http.MethodPost, u, bytes.NewBuffer(body)) - if err != nil { - return nil, fmt.Errorf("failed to construct request: %w", err) - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", messageQueueAccessToken)) - - resp, err := s.Do(req) - if err != nil { - return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var acquiredJobs acquireJobsResult - err = json.NewDecoder(resp.Body).Decode(&acquiredJobs) - if err != nil { - return nil, fmt.Errorf("failed to decode response: %w", err) - } - - return acquiredJobs.Value, nil -} - -func (s *ScaleSetClient) GetAcquirableJobs(ctx context.Context, runnerScaleSetID int) (params.AcquirableJobList, error) { - path := fmt.Sprintf("%d/acquirablejobs", runnerScaleSetID) - - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.AcquirableJobList{}, fmt.Errorf("failed to construct request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.AcquirableJobList{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - if resp.StatusCode == http.StatusNoContent { - return params.AcquirableJobList{Count: 0, Jobs: []params.AcquirableJob{}}, nil - } - - var acquirableJobList params.AcquirableJobList - err = json.NewDecoder(resp.Body).Decode(&acquirableJobList) - if err != nil { - return params.AcquirableJobList{}, fmt.Errorf("failed to decode response: %w", err) - } - - return acquirableJobList, nil -} diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go deleted file mode 100644 index 8fafc2c4..00000000 --- a/util/github/scalesets/message_sessions.go +++ /dev/null @@ -1,291 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
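ScaleSetClient.Do above converts non-2xx responses into GARM's typed errors, so callers of methods like AcquireJobs can branch with errors.Is instead of inspecting status codes. A sketch of that consumption pattern; the acquire callback is a hypothetical stand-in for ScaleSetClient.AcquireJobs, and the recovery choices are illustrative, not GARM's actual policy:

package scalesets

import (
	"context"
	"errors"
	"fmt"

	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
)

// acquireWithFallback branches on the typed errors surfaced by the
// scaleset client's Do helper.
func acquireWithFallback(ctx context.Context, acquire func(context.Context) ([]int64, error)) ([]int64, error) {
	jobs, err := acquire(ctx)
	switch {
	case err == nil:
		return jobs, nil
	case errors.Is(err, runnerErrors.ErrUnauthorized):
		// 401/403: the message queue token likely expired; a real caller
		// would refresh the message session before retrying.
		return nil, fmt.Errorf("credentials need refreshing: %w", err)
	case errors.Is(err, runnerErrors.ErrNotFound):
		// 404: the scale set is gone; nothing left to acquire.
		return nil, nil
	default:
		return nil, err
	}
}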
- -package scalesets - -import ( - "bytes" - "context" - "crypto/rand" - "encoding/json" - "errors" - "fmt" - "log/slog" - "math/big" - "net/http" - "net/url" - "strconv" - "sync" - "time" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" - garmUtil "github.com/cloudbase/garm/util" -) - -const maxCapacityHeader = "X-ScaleSetMaxCapacity" - -type MessageSession struct { - ssCli *ScaleSetClient - session *params.RunnerScaleSetSession - ctx context.Context - - done chan struct{} - closed bool - lastErr error - - mux sync.Mutex -} - -func (m *MessageSession) Close() error { - m.mux.Lock() - defer m.mux.Unlock() - if m.closed { - return nil - } - close(m.done) - m.closed = true - return nil -} - -func (m *MessageSession) MessageQueueAccessToken() string { - return m.session.MessageQueueAccessToken -} - -func (m *MessageSession) LastError() error { - return m.lastErr -} - -func (m *MessageSession) loop() { - slog.DebugContext(m.ctx, "starting message session refresh loop", "session_id", m.session.SessionID.String()) - timer := time.NewTicker(1 * time.Minute) - defer timer.Stop() - defer m.Close() - - if m.closed { - slog.DebugContext(m.ctx, "message session refresh loop closed") - return - } - for { - select { - case <-m.ctx.Done(): - slog.DebugContext(m.ctx, "message session refresh loop context done") - return - case <-m.done: - slog.DebugContext(m.ctx, "message session refresh loop done") - return - case <-timer.C: - if err := m.maybeRefreshToken(m.ctx); err != nil { - // We endlessly retry. If it's a transient error, it should eventually - // work, if it's credentials issues, users can update them. - slog.With(slog.Any("error", err)).ErrorContext(m.ctx, "failed to refresh message queue token") - m.lastErr = err - continue - } - m.lastErr = nil - } - } -} - -func (m *MessageSession) SessionsRelativeURL() (string, error) { - if m.session == nil { - return "", fmt.Errorf("session is nil") - } - if m.session.RunnerScaleSet == nil { - return "", fmt.Errorf("runner scale set is nil") - } - relativePath := fmt.Sprintf("%s/%d/sessions/%s", scaleSetEndpoint, m.session.RunnerScaleSet.ID, m.session.SessionID.String()) - return relativePath, nil -} - -func (m *MessageSession) Refresh(ctx context.Context) error { - slog.DebugContext(ctx, "refreshing message session token", "session_id", m.session.SessionID.String()) - m.mux.Lock() - defer m.mux.Unlock() - - relPath, err := m.SessionsRelativeURL() - if err != nil { - return fmt.Errorf("failed to get session URL: %w", err) - } - req, err := m.ssCli.newActionsRequest(ctx, http.MethodPatch, relPath, nil) - if err != nil { - return fmt.Errorf("failed to create message delete request: %w", err) - } - resp, err := m.ssCli.Do(req) - if err != nil { - return fmt.Errorf("failed to delete message session: %w", err) - } - defer resp.Body.Close() - - var refreshedSession params.RunnerScaleSetSession - if err := json.NewDecoder(resp.Body).Decode(&refreshedSession); err != nil { - return fmt.Errorf("failed to decode response: %w", err) - } - slog.DebugContext(ctx, "refreshed message session token") - m.session = &refreshedSession - return nil -} - -func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { - if m.session == nil { - return fmt.Errorf("session is nil") - } - - expiresAt, err := m.session.ExiresAt() - if err != nil { - return fmt.Errorf("failed to get expires at: %w", err) - } - // add some jitter (30 second interval) - randInt, err := rand.Int(rand.Reader, big.NewInt(30)) - if err != nil 
{ - return fmt.Errorf("failed to get a random number") - } - expiresIn := time.Duration(randInt.Int64())*time.Second + 10*time.Minute - slog.DebugContext(ctx, "checking if message session token needs refresh", "expires_at", expiresAt) - if m.session.ExpiresIn(expiresIn) { - if err := m.Refresh(ctx); err != nil { - return fmt.Errorf("failed to refresh message queue token: %w", err) - } - } - - return nil -} - -func (m *MessageSession) GetMessage(ctx context.Context, lastMessageID int64, maxCapacity uint) (params.RunnerScaleSetMessage, error) { - u, err := url.Parse(m.session.MessageQueueURL) - if err != nil { - return params.RunnerScaleSetMessage{}, err - } - - if lastMessageID > 0 { - q := u.Query() - q.Set("lastMessageId", strconv.FormatInt(lastMessageID, 10)) - u.RawQuery = q.Encode() - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) - if err != nil { - return params.RunnerScaleSetMessage{}, fmt.Errorf("failed to create request: %w", err) - } - - req.Header.Set("Accept", "application/json; api-version=6.0-preview") - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", m.session.MessageQueueAccessToken)) - req.Header.Set(maxCapacityHeader, fmt.Sprintf("%d", maxCapacity)) - - resp, err := m.ssCli.Do(req) - if err != nil { - return params.RunnerScaleSetMessage{}, fmt.Errorf("request to %s failed: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - if resp.StatusCode == http.StatusAccepted { - slog.DebugContext(ctx, "no messages available in queue") - return params.RunnerScaleSetMessage{}, nil - } - - var message params.RunnerScaleSetMessage - if err := json.NewDecoder(resp.Body).Decode(&message); err != nil { - return params.RunnerScaleSetMessage{}, fmt.Errorf("failed to decode response: %w", err) - } - return message, nil -} - -func (m *MessageSession) DeleteMessage(ctx context.Context, messageID int64) error { - u, err := url.Parse(m.session.MessageQueueURL) - if err != nil { - return err - } - - u.Path = fmt.Sprintf("%s/%d", u.Path, messageID) - - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, u.String(), nil) - if err != nil { - return err - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", m.session.MessageQueueAccessToken)) - - resp, err := m.ssCli.Do(req) - if err != nil { - return err - } - resp.Body.Close() - - return nil -} - -func (s *ScaleSetClient) CreateMessageSession(ctx context.Context, runnerScaleSetID int, owner string) (*MessageSession, error) { - path := fmt.Sprintf("%s/%d/sessions", scaleSetEndpoint, runnerScaleSetID) - - newSession := params.RunnerScaleSetSession{ - OwnerName: owner, - } - - requestData, err := json.Marshal(newSession) - if err != nil { - return nil, fmt.Errorf("failed to marshal session data: %w", err) - } - - req, err := s.newActionsRequest(ctx, http.MethodPost, path, bytes.NewBuffer(requestData)) - if err != nil { - return nil, fmt.Errorf("failed to create request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to execute request to %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var createdSession params.RunnerScaleSetSession - if err := json.NewDecoder(resp.Body).Decode(&createdSession); err != nil { - return nil, fmt.Errorf("failed to decode response: %w", err) - } - - msgSessionCtx := garmUtil.WithSlogContext( - ctx, - slog.Any("session_id", createdSession.SessionID.String())) - sess := &MessageSession{ - ssCli: s, - session: &createdSession, - ctx: 
msgSessionCtx, - done: make(chan struct{}), - closed: false, - } - go sess.loop() - - return sess, nil -} - -func (s *ScaleSetClient) DeleteMessageSession(ctx context.Context, session *MessageSession) error { - path, err := session.SessionsRelativeURL() - if err != nil { - return fmt.Errorf("failed to delete session: %w", err) - } - - req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) - if err != nil { - return fmt.Errorf("failed to create message delete request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("failed to delete message session: %w", err) - } - // a not found session is already gone; resp may be nil here, so - // return early instead of dereferencing it below. - return nil - } - defer resp.Body.Close() - return nil -} diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go deleted file mode 100644 index 79c321bc..00000000 --- a/util/github/scalesets/runners.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package scalesets - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "net/http" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -type scaleSetJitRunnerConfig struct { - Name string `json:"name"` - WorkFolder string `json:"workFolder"` -} - -func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName string, scaleSetID int) (params.RunnerScaleSetJitRunnerConfig, error) { - runnerSettings := scaleSetJitRunnerConfig{ - Name: runnerName, - WorkFolder: "_work", - } - - body, err := json.Marshal(runnerSettings) - if err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, err - } - - if err := s.ensureAdminInfo(ctx); err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to ensure admin info: %w", err) - } - - jitConfigPath := fmt.Sprintf("%s/%d/generatejitconfig", scaleSetEndpoint, scaleSetID) - req, err := s.newActionsRequest(ctx, http.MethodPost, jitConfigPath, bytes.NewBuffer(body)) - if err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to create request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var runnerJitConfig params.RunnerScaleSetJitRunnerConfig - if err := json.NewDecoder(resp.Body).Decode(&runnerJitConfig); err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to decode response: %w", err) - } - return runnerJitConfig, nil -} - -func (s *ScaleSetClient) GetRunner(ctx context.Context, runnerID int64) (params.RunnerReference, error) { - path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerID) - - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.RunnerReference{}, fmt.Errorf("failed to construct request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerReference{},
fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var runnerReference params.RunnerReference - if err := json.NewDecoder(resp.Body).Decode(&runnerReference); err != nil { - return params.RunnerReference{}, fmt.Errorf("failed to decode response: %w", err) - } - - return runnerReference, nil -} - -func (s *ScaleSetClient) ListAllRunners(ctx context.Context) (params.RunnerReferenceList, error) { - req, err := s.newActionsRequest(ctx, http.MethodGet, runnerEndpoint, nil) - if err != nil { - return params.RunnerReferenceList{}, fmt.Errorf("failed to construct request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerReferenceList{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var runnerList params.RunnerReferenceList - if err := json.NewDecoder(resp.Body).Decode(&runnerList); err != nil { - return params.RunnerReferenceList{}, fmt.Errorf("failed to decode response: %w", err) - } - - return runnerList, nil -} - -func (s *ScaleSetClient) GetRunnerByName(ctx context.Context, runnerName string) (params.RunnerReference, error) { - path := fmt.Sprintf("%s?agentName=%s", runnerEndpoint, runnerName) - - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.RunnerReference{}, fmt.Errorf("failed to construct request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerReference{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - defer resp.Body.Close() - - var runnerList params.RunnerReferenceList - if err := json.NewDecoder(resp.Body).Decode(&runnerList); err != nil { - return params.RunnerReference{}, fmt.Errorf("failed to decode response: %w", err) - } - - if runnerList.Count == 0 { - return params.RunnerReference{}, fmt.Errorf("could not find runner with name %q: %w", runnerName, runnerErrors.ErrNotFound) - } - - if runnerList.Count > 1 { - return params.RunnerReference{}, fmt.Errorf("failed to decode response: %w", err) - } - - return runnerList.RunnerReferences[0], nil -} - -func (s *ScaleSetClient) RemoveRunner(ctx context.Context, runnerID int64) error { - path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerID) - - req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) - if err != nil { - return fmt.Errorf("failed to construct request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return fmt.Errorf("request failed for %s: %w", req.URL.String(), err) - } - - resp.Body.Close() - return nil -} diff --git a/util/github/scalesets/scalesets.go b/util/github/scalesets/scalesets.go deleted file mode 100644 index 2aae493a..00000000 --- a/util/github/scalesets/scalesets.go +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package scalesets - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "net/http" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" -) - -const ( - runnerEndpoint = "_apis/distributedtask/pools/0/agents" - scaleSetEndpoint = "_apis/runtime/runnerscalesets" -) - -const ( - HeaderActionsActivityID = "ActivityId" - HeaderGitHubRequestID = "X-GitHub-Request-Id" -) - -func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Context, runnerGroupID int, name string) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s?runnerGroupId=%d&name=%s", scaleSetEndpoint, runnerGroupID, name) - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.RunnerScaleSet{}, err - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerScaleSet{}, err - } - defer resp.Body.Close() - - var runnerScaleSetList *params.RunnerScaleSetsResponse - if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) - } - if runnerScaleSetList.Count == 0 { - return params.RunnerScaleSet{}, runnerErrors.NewNotFoundError("runner scale set with name %s and runner group ID %d was not found", name, runnerGroupID) - } - - // Runner scale sets must have a unique name. Attempting to create a runner scale set with the same name as - // an existing scale set will result in a Bad Request (400) error. - return runnerScaleSetList.RunnerScaleSets[0], nil -} - -func (s *ScaleSetClient) GetRunnerScaleSetByID(ctx context.Context, runnerScaleSetID int) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.RunnerScaleSet{}, err - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to get runner scaleset with ID %d: %w", runnerScaleSetID, err) - } - defer resp.Body.Close() - - var runnerScaleSet params.RunnerScaleSet - if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSet); err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) - } - return runnerScaleSet, nil -} - -// ListRunnerScaleSets lists all runner scale sets in a github entity. -func (s *ScaleSetClient) ListRunnerScaleSets(ctx context.Context) (*params.RunnerScaleSetsResponse, error) { - req, err := s.newActionsRequest(ctx, http.MethodGet, scaleSetEndpoint, nil) - if err != nil { - return nil, err - } - resp, err := s.Do(req) - if err != nil { - return nil, fmt.Errorf("failed to list runner scale sets: %w", err) - } - defer resp.Body.Close() - - var runnerScaleSetList params.RunnerScaleSetsResponse - if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { - return nil, fmt.Errorf("failed to decode response: %w", err) - } - - return &runnerScaleSetList, nil -} - -// CreateRunnerScaleSet creates a new runner scale set in the target GitHub entity.
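// Because scale set names are unique within a runner group (see the note in
// GetRunnerScaleSetByNameAndRunnerGroup), callers typically probe for an
// existing scale set before creating one. A hedged sketch of that
// create-or-get flow; the RunnerScaleSet field names and the stdlib errors
// import are assumptions for illustration:
//
//	existing, err := s.GetRunnerScaleSetByNameAndRunnerGroup(ctx, groupID, name)
//	if err == nil {
//		return existing, nil
//	}
//	if !errors.Is(err, runnerErrors.ErrNotFound) {
//		return params.RunnerScaleSet{}, err
//	}
//	return s.CreateRunnerScaleSet(ctx, &params.RunnerScaleSet{Name: name, RunnerGroupID: groupID})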
-func (s *ScaleSetClient) CreateRunnerScaleSet(ctx context.Context, runnerScaleSet *params.RunnerScaleSet) (params.RunnerScaleSet, error) { - body, err := json.Marshal(runnerScaleSet) - if err != nil { - return params.RunnerScaleSet{}, err - } - - req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSetEndpoint, bytes.NewReader(body)) - if err != nil { - return params.RunnerScaleSet{}, err - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to create runner scale set: %w", err) - } - defer resp.Body.Close() - - var createdRunnerScaleSet params.RunnerScaleSet - if err := json.NewDecoder(resp.Body).Decode(&createdRunnerScaleSet); err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) - } - return createdRunnerScaleSet, nil -} - -func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSetID int, runnerScaleSet params.RunnerScaleSet) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) - - body, err := json.Marshal(runnerScaleSet) - if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to marshal request: %w", err) - } - - req, err := s.newActionsRequest(ctx, http.MethodPatch, path, bytes.NewReader(body)) - if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to create request: %w", err) - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to make request: %w", err) - } - defer resp.Body.Close() - - var ret params.RunnerScaleSet - if err := json.NewDecoder(resp.Body).Decode(&ret); err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) - } - return ret, nil -} - -func (s *ScaleSetClient) DeleteRunnerScaleSet(ctx context.Context, runnerScaleSetID int) error { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) - req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) - if err != nil { - return err - } - - // Go through s.Do so authentication and error handling stay consistent - // with every other scale set API call, instead of a bare http.Client. - resp, err := s.Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusNoContent { - return fmt.Errorf("failed to delete scale set with code %d", resp.StatusCode) - } - - return nil -} - -func (s *ScaleSetClient) GetRunnerGroupByName(ctx context.Context, runnerGroup string) (params.RunnerGroup, error) { - path := fmt.Sprintf("_apis/runtime/runnergroups/?groupName=%s", runnerGroup) - req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) - if err != nil { - return params.RunnerGroup{}, err - } - - resp, err := s.Do(req) - if err != nil { - return params.RunnerGroup{}, fmt.Errorf("failed to make request: %w", err) - } - defer resp.Body.Close() - - var runnerGroupList params.RunnerGroupList - err = json.NewDecoder(resp.Body).Decode(&runnerGroupList) - if err != nil { - return params.RunnerGroup{}, fmt.Errorf("failed to decode response: %w", err) - } - - if runnerGroupList.Count == 0 { - return params.RunnerGroup{}, runnerErrors.NewNotFoundError("runner group %s does not exist", runnerGroup) - } - - if runnerGroupList.Count > 1 { - return params.RunnerGroup{}, runnerErrors.NewConflictError("multiple runner groups exist with the same name (%s)", runnerGroup) - } - - return runnerGroupList.RunnerGroups[0], nil -} diff --git a/util/github/scalesets/token.go b/util/github/scalesets/token.go deleted file mode 100644 index 1491b748..00000000 --- a/util/github/scalesets/token.go +++ /dev/null @@ -1,105 +0,0
@@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package scalesets - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "time" - - "github.com/cloudbase/garm/params" -) - -func (s *ScaleSetClient) getActionServiceInfo(ctx context.Context) (params.ActionsServiceAdminInfoResponse, error) { - regPath := "/actions/runner-registration" - baseURL := s.ghCli.GithubBaseURL() - url, err := baseURL.Parse(regPath) - if err != nil { - return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to parse url: %w", err) - } - - entity := s.ghCli.GetEntity() - body := params.ActionsServiceAdminInfoRequest{ - URL: entity.ForgeURL(), - RunnerEvent: "register", - } - - buf := &bytes.Buffer{} - enc := json.NewEncoder(buf) - enc.SetEscapeHTML(false) - - if err := enc.Encode(body); err != nil { - return params.ActionsServiceAdminInfoResponse{}, err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), buf) - if err != nil { - return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to create request: %w", err) - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", fmt.Sprintf("RemoteAuth %s", *s.runnerRegistrationToken.Token)) - - resp, err := s.Do(req) - if err != nil { - return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to get actions service admin info: %w", err) - } - defer resp.Body.Close() - - data, err := io.ReadAll(resp.Body) - if err != nil { - return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to read response body: %w", err) - } - data = bytes.TrimPrefix(data, []byte("\xef\xbb\xbf")) - - var info params.ActionsServiceAdminInfoResponse - if err := json.Unmarshal(data, &info); err != nil { - return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to decode response: %w", err) - } - - return info, nil -} - -func (s *ScaleSetClient) ensureAdminInfo(ctx context.Context) error { - s.mux.Lock() - defer s.mux.Unlock() - - var expiresAt time.Time - if s.runnerRegistrationToken != nil { - expiresAt = s.runnerRegistrationToken.GetExpiresAt().Time - } - - now := time.Now().UTC().Add(2 * time.Minute) - if now.After(expiresAt) || s.runnerRegistrationToken == nil { - token, _, err := s.ghCli.CreateEntityRegistrationToken(ctx) - if err != nil { - return fmt.Errorf("failed to fetch runner registration token: %w", err) - } - s.runnerRegistrationToken = token - } - - if s.actionsServiceInfo == nil || s.actionsServiceInfo.ExpiresIn(2*time.Minute) { - info, err := s.getActionServiceInfo(ctx) - if err != nil { - return fmt.Errorf("failed to get action service info: %w", err) - } - s.actionsServiceInfo = &info - } - - return nil -} diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go deleted file mode 100644 index e8387e63..00000000 --- a/util/github/scalesets/util.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2024 Cloudbase Solutions SRL -// -// Licensed under the Apache License, 
Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package scalesets - -import ( - "context" - "fmt" - "io" - "net/http" - "net/url" - "path" -) - -func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, uriPath string, body io.Reader) (*http.Request, error) { - if err := s.ensureAdminInfo(ctx); err != nil { - return nil, fmt.Errorf("failed to update token: %w", err) - } - - actionsURI, err := s.actionsServiceInfo.GetURL() - if err != nil { - return nil, fmt.Errorf("failed to get pipeline URL: %w", err) - } - - pathURI, err := url.Parse(uriPath) - if err != nil { - return nil, fmt.Errorf("failed to parse path: %w", err) - } - pathQuery := pathURI.Query() - baseQuery := actionsURI.Query() - for k, values := range pathQuery { - if baseQuery.Get(k) == "" { - for _, val := range values { - baseQuery.Add(k, val) - } - } - } - if baseQuery.Get("api-version") == "" { - baseQuery.Set("api-version", "6.0-preview") - } - - actionsURI.Path = path.Join(actionsURI.Path, pathURI.Path) - actionsURI.RawQuery = baseQuery.Encode() - - req, err := http.NewRequestWithContext(ctx, method, actionsURI.String(), body) - if err != nil { - return nil, err - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", s.actionsServiceInfo.Token)) - - return req, nil -} diff --git a/util/logging.go b/util/logging.go index 99c69da7..ac35863b 100644 --- a/util/logging.go +++ b/util/logging.go @@ -1,17 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- package util import ( @@ -25,58 +11,20 @@ const ( slogCtxFields slogContextKey = "slog_ctx_fields" ) -var _ slog.Handler = &SlogMultiHandler{} - -func WithSlogContext(ctx context.Context, attrs ...slog.Attr) context.Context { - return context.WithValue(ctx, slogCtxFields, attrs) +type ContextHandler struct { + slog.Handler } -type SlogMultiHandler struct { - Handlers []slog.Handler -} - -func (m *SlogMultiHandler) Enabled(ctx context.Context, level slog.Level) bool { - // Enabled if any handler is enabled - for _, h := range m.Handlers { - if h.Enabled(ctx, level) { - return true - } - } - return false -} - -func (m *SlogMultiHandler) Handle(ctx context.Context, r slog.Record) error { - record := r.Clone() +func (h ContextHandler) Handle(ctx context.Context, r slog.Record) error { attrs, ok := ctx.Value(slogCtxFields).([]slog.Attr) if ok { for _, v := range attrs { - record.AddAttrs(v) + r.AddAttrs(v) } } - - var firstErr error - for _, h := range m.Handlers { - if err := h.Handle(ctx, record); err != nil && firstErr == nil { - firstErr = err - } - } - return firstErr + return h.Handler.Handle(ctx, r) } -func (m *SlogMultiHandler) WithAttrs(attrs []slog.Attr) slog.Handler { - hs := make([]slog.Handler, len(m.Handlers)) - for i, h := range m.Handlers { - hs[i] = h.WithAttrs(attrs) - } - return &SlogMultiHandler{ - Handlers: hs, - } -} - -func (m *SlogMultiHandler) WithGroup(name string) slog.Handler { - hs := make([]slog.Handler, len(m.Handlers)) - for i, h := range m.Handlers { - hs[i] = h.WithGroup(name) - } - return &SlogMultiHandler{hs} +func WithContext(ctx context.Context, attrs ...slog.Attr) context.Context { + return context.WithValue(ctx, slogCtxFields, attrs) } diff --git a/util/util.go b/util/util.go index dc92ce0e..b6d8ac6d 100644 --- a/util/util.go +++ b/util/util.go @@ -16,97 +16,442 @@ package util import ( "context" + "encoding/base64" + "encoding/json" "fmt" + "log/slog" "net/http" - "unicode/utf8" + + "github.com/google/go-github/v72/github" + "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" - commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" ) -func FetchTools(ctx context.Context, cli common.GithubClient) ([]commonParams.RunnerApplicationDownload, error) { - tools, ghResp, err := cli.ListEntityRunnerApplicationDownloads(ctx) +type githubClient struct { + *github.ActionsService + org *github.OrganizationsService + repo *github.RepositoriesService + enterprise *github.EnterpriseService + + entity params.GithubEntity +} + +func (g *githubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) (ret []*github.Hook, response *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "ListHooks", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListHooks", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.repo.ListHooks(ctx, g.entity.Owner, g.entity.Name, opts) + case params.GithubEntityTypeOrganization: + ret, response, err = g.org.ListHooks(ctx, g.entity.Owner, opts) + default: + return nil, nil, fmt.Errorf("invalid entity type: %s", g.entity.EntityType) + } + return ret, response, err +} + +func (g *githubClient) GetEntityHook(ctx 
context.Context, id int64) (ret *github.Hook, err error) { + metrics.GithubOperationCount.WithLabelValues( + "GetHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "GetHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, _, err = g.repo.GetHook(ctx, g.entity.Owner, g.entity.Name, id) + case params.GithubEntityTypeOrganization: + ret, _, err = g.org.GetHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { + metrics.GithubOperationCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, _, err = g.repo.CreateHook(ctx, g.entity.Owner, g.entity.Name, hook) + case params.GithubEntityTypeOrganization: + ret, _, err = g.org.CreateHook(ctx, g.entity.Owner, hook) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "DeleteHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "DeleteHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, err = g.repo.DeleteHook(ctx, g.entity.Owner, g.entity.Name, id) + case params.GithubEntityTypeOrganization: + ret, err = g.org.DeleteHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "PingHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "PingHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, err = g.repo.PingHook(ctx, g.entity.Owner, g.entity.Name, id) + case params.GithubEntityTypeOrganization: + ret, err = g.org.PingHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { + var ret *github.Runners + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "ListEntityRunners", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListEntityRunners", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch 
g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.ListRunners(ctx, g.entity.Owner, g.entity.Name, opts) + case params.GithubEntityTypeOrganization: + ret, response, err = g.ListOrganizationRunners(ctx, g.entity.Owner, opts) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.ListRunners(ctx, g.entity.Owner, opts) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { + var ret []*github.RunnerApplicationDownload + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "ListEntityRunnerApplicationDownloads", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListEntityRunnerApplicationDownloads", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.ListRunnerApplicationDownloads(ctx, g.entity.Owner, g.entity.Name) + case params.GithubEntityTypeOrganization: + ret, response, err = g.ListOrganizationRunnerApplicationDownloads(ctx, g.entity.Owner) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.ListRunnerApplicationDownloads(ctx, g.entity.Owner) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "RemoveEntityRunner", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "RemoveEntityRunner", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + response, err = g.RemoveRunner(ctx, g.entity.Owner, g.entity.Name, runnerID) + case params.GithubEntityTypeOrganization: + response, err = g.RemoveOrganizationRunner(ctx, g.entity.Owner, runnerID) + case params.GithubEntityTypeEnterprise: + response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) + default: + return nil, errors.New("invalid entity type") + } + + return response, err +} + +func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { + var ret *github.RegistrationToken + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "CreateEntityRegistrationToken", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "CreateEntityRegistrationToken", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.CreateRegistrationToken(ctx, g.entity.Owner, g.entity.Name) + case params.GithubEntityTypeOrganization: + ret, response, err = g.CreateOrganizationRegistrationToken(ctx, g.entity.Owner) + case params.GithubEntityTypeEnterprise: + ret, response, err = 
g.enterprise.CreateRegistrationToken(ctx, g.entity.Owner) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { + opts := github.ListOrgRunnerGroupOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, + } + + for { + metrics.GithubOperationCount.WithLabelValues( + "ListOrganizationRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + runnerGroups, ghResp, err := g.ListOrganizationRunnerGroups(ctx, entity.Owner, &opts) + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListOrganizationRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + } + return 0, errors.Wrap(err, "fetching runners") + } + for _, runnerGroup := range runnerGroups.RunnerGroups { + if runnerGroup.Name != nil && *runnerGroup.Name == rgName { + return *runnerGroup.ID, nil + } + } + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + return 0, runnerErrors.NewNotFoundError("runner group %s not found", rgName) +} + +func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { + opts := github.ListEnterpriseRunnerGroupOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, + } + + for { + metrics.GithubOperationCount.WithLabelValues( + "ListRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + runnerGroups, ghResp, err := g.enterprise.ListRunnerGroups(ctx, entity.Owner, &opts) + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + } + return 0, errors.Wrap(err, "fetching runners") + } + for _, runnerGroup := range runnerGroups.RunnerGroups { + if runnerGroup.Name != nil && *runnerGroup.Name == rgName { + return *runnerGroup.ID, nil + } + } + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + return 0, runnerErrors.NewNotFoundError("runner group not found") +} + +func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) { + // If no runner group is set, use the default runner group ID. This is also the default for + // repository level runners. + var rgID int64 = 1 + + if pool.GitHubRunnerGroup != "" { + switch g.entity.EntityType { + case params.GithubEntityTypeOrganization: + rgID, err = g.getOrganizationRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) + case params.GithubEntityTypeEnterprise: + rgID, err = g.getEnterpriseRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) + } + + if err != nil { + return nil, nil, fmt.Errorf("getting runner group ID: %w", err) + } + } + + req := github.GenerateJITConfigRequest{ + Name: instance, + RunnerGroupID: rgID, + Labels: labels, + // nolint:golangci-lint,godox + // TODO(gabriel-samfira): Should we make this configurable? 
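// The work folder is where the runner agent checks out job workspaces,
// relative to the runner install root. "_work" mirrors the upstream
// actions/runner default; making it configurable would, hypothetically, just
// swap the literal below for something like github.Ptr(pool.WorkFolder),
// where pool.WorkFolder is an assumed field that does not exist yet.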
+ WorkFolder: github.Ptr("_work"), + } + + metrics.GithubOperationCount.WithLabelValues( + "GetEntityJITConfig", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + + var ret *github.JITRunnerConfig + var response *github.Response + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.GenerateRepoJITConfig(ctx, g.entity.Owner, g.entity.Name, &req) + case params.GithubEntityTypeOrganization: + ret, response, err = g.GenerateOrgJITConfig(ctx, g.entity.Owner, &req) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.GenerateEnterpriseJITConfig(ctx, g.entity.Owner, &req) + default: + // Guard against an invalid entity type; without this, ret stays nil + // and the dereference below would panic. + return nil, nil, errors.New("invalid entity type") + } if err != nil { - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, fmt.Errorf("error fetching tools: %w", runnerErrors.ErrUnauthorized) + metrics.GithubOperationFailedCount.WithLabelValues( + "GetEntityJITConfig", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + if response != nil && response.StatusCode == http.StatusUnauthorized { + return nil, nil, errors.Wrap(runnerErrors.ErrUnauthorized, "failed to get JIT config") } - return nil, fmt.Errorf("error fetching runner tools: %w", err) + return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) } - ret := []commonParams.RunnerApplicationDownload{} - for _, tool := range tools { - if tool == nil { - continue + defer func(run *github.Runner) { + if err != nil && run != nil { + _, innerErr := g.RemoveEntityRunner(ctx, run.GetID()) + slog.With(slog.Any("error", innerErr)).ErrorContext( + ctx, "failed to remove runner", + "runner_id", run.GetID(), string(g.entity.EntityType), g.entity.String()) } - ret = append(ret, commonParams.RunnerApplicationDownload(*tool)) + }(ret.Runner) + + decoded, err := base64.StdEncoding.DecodeString(*ret.EncodedJITConfig) + if err != nil { + return nil, nil, fmt.Errorf("failed to decode JIT config: %w", err) } - return ret, nil + + var jitConfig map[string]string + if err := json.Unmarshal(decoded, &jitConfig); err != nil { + return nil, nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) + } + + return jitConfig, ret.Runner, nil } -func ASCIIEqualFold(s, t string) bool { - // Fast ASCII path for equal-length ASCII strings - if len(s) == len(t) && isASCII(s) && isASCII(t) { - for i := 0; i < len(s); i++ { - a, b := s[i], t[i] - if a != b { - if 'A' <= a && a <= 'Z' { - a = a + 'a' - 'A' - } - if 'A' <= b && b <= 'Z' { - b = b + 'a' - 'A' - } - if a != b { - return false - } - } - } - return true +func GithubClient(ctx context.Context, entity params.GithubEntity, credsDetails params.GithubCredentials) (common.GithubClient, error) { + httpClient, err := credsDetails.GetHTTPClient(ctx) + if err != nil { + return nil, errors.Wrap(err, "fetching http client") } - // UTF-8 path - handle different byte lengths correctly - i, j := 0, 0 - for i < len(s) && j < len(t) { - sr, sizeS := utf8.DecodeRuneInString(s[i:]) - tr, sizeT := utf8.DecodeRuneInString(t[j:]) - - // Handle invalid UTF-8 - they must be identical - if sr == utf8.RuneError || tr == utf8.RuneError { - // For invalid UTF-8, compare the raw bytes - if sr == utf8.RuneError && tr == utf8.RuneError { - if sizeS == sizeT && s[i:i+sizeS] == t[j:j+sizeT] { - i += sizeS - j += sizeT - continue - } - } - return false - } - - if sr != tr { - // Apply ASCII case folding only - if 'A' <= sr && sr <= 'Z' { - sr = sr + 'a' - 'A' - } - if 'A' <= tr && tr <= 'Z' { - tr = tr + 'a' - 'A' - } - if sr != tr { - return false - } - } - - i += sizeS - j += sizeT + ghClient, err :=
github.NewClient(httpClient).WithEnterpriseURLs(credsDetails.APIBaseURL, credsDetails.UploadBaseURL) + if err != nil { + return nil, errors.Wrap(err, "fetching github client") } - return i == len(s) && j == len(t) -} - -func isASCII(s string) bool { - for i := 0; i < len(s); i++ { - if s[i] >= 0x80 { - return false - } - } - return true + + cli := &githubClient{ + ActionsService: ghClient.Actions, + org: ghClient.Organizations, + repo: ghClient.Repositories, + enterprise: ghClient.Enterprise, + entity: entity, + } + return cli, nil } diff --git a/util/util_test.go b/util/util_test.go deleted file mode 100644 index f04dab84..00000000 --- a/util/util_test.go +++ /dev/null @@ -1,394 +0,0 @@ -// Copyright 2022 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package util - -import ( - "testing" -) - -func TestASCIIEqualFold(t *testing.T) { - tests := []struct { - name string - s string - t string - expected bool - reason string - }{ - // Basic ASCII case folding tests - { - name: "identical strings", - s: "hello", - t: "hello", - expected: true, - reason: "identical strings should match", - }, - { - name: "simple case difference", - s: "Hello", - t: "hello", - expected: true, - reason: "ASCII case folding should match H/h", - }, - { - name: "all uppercase vs lowercase", - s: "HELLO", - t: "hello", - expected: true, - reason: "ASCII case folding should match all cases", - }, - { - name: "mixed case", - s: "HeLLo", - t: "hEllO", - expected: true, - reason: "mixed case should match after folding", - }, - - // Empty string tests - { - name: "both empty", - s: "", - t: "", - expected: true, - reason: "empty strings should match", - }, - { - name: "one empty", - s: "hello", - t: "", - expected: false, - reason: "different length strings should not match", - }, - { - name: "other empty", - s: "", - t: "hello", - expected: false, - reason: "different length strings should not match", - }, - - // Different content tests - { - name: "different strings same case", - s: "hello", - t: "world", - expected: false, - reason: "different content should not match", - }, - { - name: "different strings different case", - s: "Hello", - t: "World", - expected: false, - reason: "different content should not match regardless of case", - }, - { - name: "different length", - s: "hello", - t: "hello world", - expected: false, - reason: "different length strings should not match", - }, - - // ASCII non-alphabetic characters - { - name: "numbers and symbols", - s: "Hello123!@#", - t: "hello123!@#", - expected: true, - reason: "numbers and symbols should be preserved, only letters folded", - }, - { - name: "different numbers", - s: "Hello123", - t: "Hello124", - expected: false, - reason: "different numbers should not match", - }, - { - name: "different symbols", - s: "Hello!", - t: "Hello?", - expected: false, - reason: "different symbols should not match", - }, - - // URL-specific tests (CORS security focus) - { - name: "HTTP scheme case", - s: "HTTP://example.com", - t: 
"http://example.com", - expected: true, - reason: "HTTP scheme should be case-insensitive", - }, - { - name: "HTTPS scheme case", - s: "HTTPS://EXAMPLE.COM", - t: "https://example.com", - expected: true, - reason: "HTTPS scheme and domain should be case-insensitive", - }, - { - name: "complex URL case", - s: "HTTPS://API.EXAMPLE.COM:8080/PATH", - t: "https://api.example.com:8080/path", - expected: true, - reason: "entire URL should be case-insensitive for ASCII", - }, - { - name: "subdomain case", - s: "https://API.SUB.EXAMPLE.COM", - t: "https://api.sub.example.com", - expected: true, - reason: "subdomains should be case-insensitive", - }, - - // Unicode security tests (homograph attack prevention) - { - name: "cyrillic homograph attack", - s: "https://еxample.com", // Cyrillic 'е' (U+0435) - t: "https://example.com", // Latin 'e' (U+0065) - expected: false, - reason: "should block Cyrillic homograph attack", - }, - { - name: "mixed cyrillic attack", - s: "https://ехample.com", // Cyrillic 'е' and 'х' - t: "https://example.com", // Latin 'e' and 'x' - expected: false, - reason: "should block mixed Cyrillic homograph attack", - }, - { - name: "cyrillic 'а' attack", - s: "https://exаmple.com", // Cyrillic 'а' (U+0430) - t: "https://example.com", // Latin 'a' (U+0061) - expected: false, - reason: "should block Cyrillic 'а' homograph attack", - }, - - // Unicode case folding security tests - { - name: "unicode case folding attack", - s: "https://CAFÉ.com", // Latin É (U+00C9) - t: "https://café.com", // Latin é (U+00E9) - expected: false, - reason: "should NOT perform Unicode case folding (security)", - }, - { - name: "turkish i attack", - s: "https://İSTANBUL.com", // Turkish İ (U+0130) - t: "https://istanbul.com", // Latin i - expected: false, - reason: "should NOT perform Turkish case folding", - }, - { - name: "german sharp s", - s: "https://GROß.com", // German ß (U+00DF) - t: "https://gross.com", // Expanded form - expected: false, - reason: "should NOT perform German ß expansion", - }, - - // Valid Unicode exact matches - { - name: "identical unicode", - s: "https://café.com", - t: "https://café.com", - expected: true, - reason: "identical Unicode strings should match", - }, - { - name: "identical cyrillic", - s: "https://пример.com", // Russian - t: "https://пример.com", // Russian - expected: true, - reason: "identical Cyrillic strings should match", - }, - { - name: "ascii part of unicode domain", - s: "HTTPS://café.COM", // ASCII parts should fold - t: "https://café.com", - expected: true, - reason: "ASCII parts should fold even in Unicode strings", - }, - - // Edge cases with UTF-8 - { - name: "different UTF-8 byte length same rune count", - s: "Café", // é is 2 bytes - t: "Café", // é is 2 bytes (same) - expected: true, - reason: "same Unicode content should match", - }, - { - name: "UTF-8 normalization difference", - s: "café\u0301", // é as e + combining acute (3 bytes for é part) - t: "café", // é as single character (2 bytes for é part) - expected: false, - reason: "different Unicode normalization should not match", - }, - { - name: "CRITICAL: current implementation flaw", - s: "ABC" + string([]byte{0xC3, 0xA9}), // ABC + é (2 bytes) = 5 bytes - t: "abc" + string([]byte{0xC3, 0xA9}), // abc + é (2 bytes) = 5 bytes - expected: true, - reason: "should match after ASCII folding (this should pass with correct implementation)", - }, - { - name: "invalid UTF-8 sequence", - s: "hello\xff", // Invalid UTF-8 - t: "hello\xff", // Invalid UTF-8 - expected: true, - reason: "identical 
invalid UTF-8 should match", - }, - { - name: "different invalid UTF-8", - s: "hello\xff", // Invalid UTF-8 - t: "hello\xfe", // Different invalid UTF-8 - expected: false, - reason: "different invalid UTF-8 should not match", - }, - - // ASCII boundary tests - { - name: "ascii boundary characters", - s: "A@Z[`a{z", // Test boundaries around A-Z - t: "a@z[`A{Z", - expected: true, - reason: "only A-Z should be folded, not punctuation", - }, - { - name: "digit boundaries", - s: "Test123ABC", - t: "test123abc", - expected: true, - reason: "digits should not be folded, only letters", - }, - - // Long string performance tests - { - name: "long ascii string", - s: "HTTP://" + repeatString("ABCDEFGHIJKLMNOPQRSTUVWXYZ", 100) + ".COM", - t: "http://" + repeatString("abcdefghijklmnopqrstuvwxyz", 100) + ".com", - expected: true, - reason: "long ASCII strings should be handled efficiently", - }, - { - name: "long unicode string", - s: repeatString("CAFÉ", 100), - t: repeatString("CAFÉ", 100), // Same case - should match - expected: true, - reason: "long identical Unicode strings should match", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := ASCIIEqualFold(tt.s, tt.t) - if result != tt.expected { - t.Errorf("ASCIIEqualFold(%q, %q) = %v, expected %v\nReason: %s", - tt.s, tt.t, result, tt.expected, tt.reason) - } - }) - } -} - -// Helper function for generating long test strings -func repeatString(s string, count int) string { - if count <= 0 { - return "" - } - result := make([]byte, 0, len(s)*count) - for i := 0; i < count; i++ { - result = append(result, s...) - } - return string(result) -} - -// Benchmark tests for performance verification -func BenchmarkASCIIEqualFold(b *testing.B) { - benchmarks := []struct { - name string - s string - t string - }{ - { - name: "short_ascii_match", - s: "HTTP://EXAMPLE.COM", - t: "http://example.com", - }, - { - name: "short_ascii_nomatch", - s: "HTTP://EXAMPLE.COM", - t: "http://different.com", - }, - { - name: "long_ascii_match", - s: "HTTP://" + repeatString("ABCDEFGHIJKLMNOPQRSTUVWXYZ", 100) + ".COM", - t: "http://" + repeatString("abcdefghijklmnopqrstuvwxyz", 100) + ".com", - }, - { - name: "unicode_nomatch", - s: "https://café.com", - t: "https://CAFÉ.com", - }, - { - name: "unicode_exact_match", - s: "https://café.com", - t: "https://café.com", - }, - } - - for _, bm := range benchmarks { - b.Run(bm.name, func(b *testing.B) { - for i := 0; i < b.N; i++ { - ASCIIEqualFold(bm.s, bm.t) - } - }) - } -} - -// Fuzzing test to catch edge cases -func FuzzASCIIEqualFold(f *testing.F) { - // Seed with interesting test cases - seeds := [][]string{ - {"hello", "HELLO"}, - {"", ""}, - {"café", "CAFÉ"}, - {"https://example.com", "HTTPS://EXAMPLE.COM"}, - {"еxample", "example"}, // Cyrillic attack - {string([]byte{0xff}), string([]byte{0xfe})}, // Invalid UTF-8 - } - - for _, seed := range seeds { - f.Add(seed[0], seed[1]) - } - - f.Fuzz(func(t *testing.T, s1, s2 string) { - // Just ensure it doesn't panic and returns a boolean - result := ASCIIEqualFold(s1, s2) - _ = result // Use the result to prevent optimization - - // Property: function should be symmetric - if ASCIIEqualFold(s1, s2) != ASCIIEqualFold(s2, s1) { - t.Errorf("ASCIIEqualFold is not symmetric: (%q, %q)", s1, s2) - } - - // Property: identical strings should always match - if s1 == s2 && !ASCIIEqualFold(s1, s2) { - t.Errorf("identical strings should match: %q", s1) - } - }) -} diff --git a/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go 
b/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go index 76e85d9c..9f98c33a 100644 --- a/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go +++ b/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go @@ -29,9 +29,9 @@ var ( // ErrBadRequest is returned is a malformed request is sent ErrBadRequest = NewBadRequestError("invalid request") // ErrTimeout is returned when a timeout occurs. - ErrTimeout = NewTimeoutError("timed out") - ErrUnprocessable = NewUnprocessableError("cannot process request") - ErrNoPoolsAvailable = NewNoPoolsAvailableError("no pools available") + ErrTimeout = fmt.Errorf("timed out") + ErrUnprocessable = fmt.Errorf("cannot process request") + ErrNoPoolsAvailable = fmt.Errorf("no pools available") ) type baseError struct { @@ -56,15 +56,6 @@ type ProviderError struct { baseError } -func (p *ProviderError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*ProviderError) - return ok -} - // NewMissingSecretError returns a new MissingSecretError func NewMissingSecretError(msg string, a ...interface{}) error { return &MissingSecretError{ @@ -79,15 +70,6 @@ type MissingSecretError struct { baseError } -func (p *MissingSecretError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*MissingSecretError) - return ok -} - // NewUnauthorizedError returns a new UnauthorizedError func NewUnauthorizedError(msg string) error { return &UnauthorizedError{ @@ -102,15 +84,6 @@ type UnauthorizedError struct { baseError } -func (p *UnauthorizedError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*UnauthorizedError) - return ok -} - // NewNotFoundError returns a new NotFoundError func NewNotFoundError(msg string, a ...interface{}) error { return &NotFoundError{ @@ -125,15 +98,6 @@ type NotFoundError struct { baseError } -func (p *NotFoundError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*NotFoundError) - return ok -} - // NewDuplicateUserError returns a new DuplicateUserError func NewDuplicateUserError(msg string) error { return &DuplicateUserError{ @@ -148,15 +112,6 @@ type DuplicateUserError struct { baseError } -func (p *DuplicateUserError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*DuplicateUserError) - return ok -} - // NewBadRequestError returns a new BadRequestError func NewBadRequestError(msg string, a ...interface{}) error { return &BadRequestError{ @@ -171,15 +126,6 @@ type BadRequestError struct { baseError } -func (p *BadRequestError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*BadRequestError) - return ok -} - // NewConflictError returns a new ConflictError func NewConflictError(msg string, a ...interface{}) error { return &ConflictError{ @@ -193,81 +139,3 @@ func NewConflictError(msg string, a ...interface{}) error { type ConflictError struct { baseError } - -func (p *ConflictError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*ConflictError) - return ok -} - -// NewTimeoutError returns a new TimoutError -func NewTimeoutError(msg string, a ...interface{}) error { - return &TimoutError{ - baseError{ - msg: fmt.Sprintf(msg, a...), - }, - } -} - -// TimoutError is returned when an operation times out. 
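// The typed Is implementations in this file let errors.Is match on the
// concrete error type rather than on instance identity, so any NotFoundError
// matches the ErrNotFound sentinel regardless of its message. A hedged
// sketch of the check callers rely on throughout GARM:
//
//	err := NewNotFoundError("runner group %s not found", "default")
//	if errors.Is(err, ErrNotFound) {
//		// handle the 404-style condition
//	}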
-type TimoutError struct { - baseError -} - -func (p *TimoutError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*TimoutError) - return ok -} - -// NewUnprocessableError returns a new UnprocessableError -func NewUnprocessableError(msg string, a ...interface{}) error { - return &UnprocessableError{ - baseError{ - msg: fmt.Sprintf(msg, a...), - }, - } -} - -// UnprocessableError is returned when a request cannot be processed. -type UnprocessableError struct { - baseError -} - -func (p *UnprocessableError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*UnprocessableError) - return ok -} - -// NewNoPoolsAvailableError returns a new NoPoolsAvailableError -func NewNoPoolsAvailableError(msg string, a ...interface{}) error { - return &NoPoolsAvailableError{ - baseError{ - msg: fmt.Sprintf(msg, a...), - }, - } -} - -// NoPoolsAvailableError is returned when there are no pools available. -type NoPoolsAvailableError struct { - baseError -} - -func (p *NoPoolsAvailableError) Is(target error) bool { - if target == nil { - return false - } - - _, ok := target.(*NoPoolsAvailableError) - return ok -} diff --git a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml index 60798c21..ee8b9bd1 100644 --- a/vendor/github.com/go-openapi/errors/.golangci.yml +++ b/vendor/github.com/go-openapi/errors/.golangci.yml @@ -1,75 +1,55 @@ -version: "2" +linters-settings: + gocyclo: + min-complexity: 45 + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + linters: - default: all + enable-all: true disable: - - cyclop + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - exhaustruct + - varnamelen + - gci - depguard - errchkjson - - errorlint - - exhaustruct - - forcetypeassert - - funlen - - gochecknoglobals - - gochecknoinits - - gocognit - - godot - - godox - - gosmopolitan - inamedparam - - intrange # disabled while < go1.22 - - ireturn - - lll - - musttag - - nestif - - nlreturn - - noinlineerr - nonamedreturns - - paralleltest - - recvcheck - - testpackage - - thelper - - tparallel - - unparam - - varnamelen - - whitespace - - wrapcheck - - wsl - - wsl_v5 - settings: - dupl: - threshold: 200 - goconst: - min-len: 2 - min-occurrences: 3 - gocyclo: - min-complexity: 45 - exclusions: - generated: lax - presets: - - comments - - common-false-positives - - legacy - - std-error-handling - paths: - - third_party$ - - builtin$ - - examples$ -formatters: - enable: - - gofmt - - goimports - exclusions: - generated: lax - paths: - - third_party$ - - builtin$ - - examples$ -issues: - # Maximum issues count per one linter. - # Set to 0 to disable. - # Default: 50 - max-issues-per-linter: 0 - # Maximum count of issues with the same text. - # Set to 0 to disable.
- # Default: 3 - max-same-issues: 0 + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + #- deadcode + #- interfacer + #- scopelint + #- varcheck + #- structcheck + #- golint + #- nosnakecase + #- maligned + #- goerr113 + #- ifshort + #- gomnd + #- exhaustivestruct diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go index 1b9f3a93..67f80386 100644 --- a/vendor/github.com/go-openapi/errors/middleware.go +++ b/vendor/github.com/go-openapi/errors/middleware.go @@ -35,7 +35,7 @@ func (v *APIVerificationFailed) Error() string { hasSpecMissing := len(v.MissingSpecification) > 0 if hasRegMissing { - fmt.Fprintf(buf, "missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section) + buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section)) } if hasRegMissing && hasSpecMissing { @@ -43,7 +43,7 @@ func (v *APIVerificationFailed) Error() string { } if hasSpecMissing { - fmt.Fprintf(buf, "missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section) + buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section)) } return buf.String() diff --git a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go index 34930c08..ce1ef9cb 100644 --- a/vendor/github.com/go-openapi/errors/parsing.go +++ b/vendor/github.com/go-openapi/errors/parsing.go @@ -30,24 +30,6 @@ type ParseError struct { message string } -// NewParseError creates a new parse error -func NewParseError(name, in, value string, reason error) *ParseError { - var msg string - if in == "" { - msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason) - } else { - msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason) - } - return &ParseError{ - code: http.StatusBadRequest, - Name: name, - In: in, - Value: value, - Reason: reason, - message: msg, - } -} - func (e *ParseError) Error() string { return e.message } @@ -77,3 +59,21 @@ const ( parseErrorTemplContent = `parsing %s %s from %q failed, because %s` parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s` ) + +// NewParseError creates a new parse error +func NewParseError(name, in, value string, reason error) *ParseError { + var msg string + if in == "" { + msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason) + } else { + msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason) + } + return &ParseError{ + code: http.StatusBadRequest, + Name: name, + In: in, + Value: value, + Reason: reason, + message: msg, + } +} diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml index 50063062..d2fafb8a 100644 --- a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml +++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml @@ -1,62 +1,56 @@ -version: "2" +linters-settings: + gocyclo: + min-complexity: 45 + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + linters: - default: all + enable-all: true disable: - - cyclop + - recvcheck + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - exhaustruct + - varnamelen + - gci - depguard - errchkjson - - errorlint - - exhaustruct - - 
forcetypeassert - - funlen - - gochecknoglobals - - gochecknoinits - - gocognit - - godot - - godox - - gosmopolitan - inamedparam - - ireturn - - lll - - musttag - - nestif - - nlreturn - nonamedreturns - - paralleltest - - testpackage - - thelper - - tparallel - - unparam - - varnamelen - - whitespace - - wrapcheck - - wsl - settings: - dupl: - threshold: 200 - goconst: - min-len: 2 - min-occurrences: 3 - gocyclo: - min-complexity: 45 - exclusions: - generated: lax - presets: - - comments - - common-false-positives - - legacy - - std-error-handling - paths: - - third_party$ - - builtin$ - - examples$ -formatters: - enable: - - gofmt - - goimports - exclusions: - generated: lax - paths: - - third_party$ - - builtin$ - - examples$ + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + #- deadcode + #- interfacer + #- scopelint + #- varcheck + #- structcheck + #- golint + #- nosnakecase + #- maligned + #- goerr113 + #- ifshort + #- gomnd + #- exhaustivestruct diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go index 61362105..a08cd68a 100644 --- a/vendor/github.com/go-openapi/jsonpointer/pointer.go +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -179,11 +179,6 @@ func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvide func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error { rValue := reflect.Indirect(reflect.ValueOf(node)) - // Check for nil to prevent panic when calling rValue.Type() - if isNil(node) { - return fmt.Errorf("cannot set field %q on nil value: %w", decodedToken, ErrPointer) - } - if ns, ok := node.(JSONSetable); ok { // pointer impl return ns.JSONSet(decodedToken, data) } @@ -290,11 +285,6 @@ func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error { return setSingleImpl(node, data, decodedToken, nameProvider) } - // Check for nil during traversal - if isNil(node) { - return fmt.Errorf("cannot traverse through nil value at %q: %w", decodedToken, ErrPointer) - } - rValue := reflect.Indirect(reflect.ValueOf(node)) kind := rValue.Kind() diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors.go b/vendor/github.com/golang-jwt/jwt/v5/errors.go index 14e00751..23bb616d 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/errors.go +++ b/vendor/github.com/golang-jwt/jwt/v5/errors.go @@ -2,7 +2,6 @@ package jwt import ( "errors" - "fmt" "strings" ) @@ -48,42 +47,3 @@ func joinErrors(errs ...error) error { errs: errs, } } - -// Unwrap implements the multiple error unwrapping for this error type, which is -// possible in Go 1.20. -func (je joinedError) Unwrap() []error { - return je.errs -} - -// newError creates a new error message with a detailed error message. The -// message will be prefixed with the contents of the supplied error type. -// Additionally, more errors, that provide more context can be supplied which -// will be appended to the message. This makes use of Go 1.20's possibility to -// include more than one %w formatting directive in [fmt.Errorf]. 
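// Because each supplied error is wrapped with its own %w directive, the
// result stays matchable with errors.Is against every sentinel passed in.
// A hedged sketch (the sentinel names are real exports of this package):
//
//	err := newError("", ErrTokenExpired, ErrTokenInvalidClaims)
//	errors.Is(err, ErrTokenExpired)       // true
//	errors.Is(err, ErrTokenInvalidClaims) // true
//
// The pre-go1.20 fallback introduced later in this change preserves the same
// property by implementing Is on joinedError.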
-// -// For example, -// -// newError("no keyfunc was provided", ErrTokenUnverifiable) -// -// will produce the error string -// -// "token is unverifiable: no keyfunc was provided" -func newError(message string, err error, more ...error) error { - var format string - var args []any - if message != "" { - format = "%w: %s" - args = []any{err, message} - } else { - format = "%w" - args = []any{err} - } - - for _, e := range more { - format += ": %w" - args = append(args, e) - } - - err = fmt.Errorf(format, args...) - return err -} diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go b/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go new file mode 100644 index 00000000..a893d355 --- /dev/null +++ b/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go @@ -0,0 +1,47 @@ +//go:build go1.20 +// +build go1.20 + +package jwt + +import ( + "fmt" +) + +// Unwrap implements the multiple error unwrapping for this error type, which is +// possible in Go 1.20. +func (je joinedError) Unwrap() []error { + return je.errs +} + +// newError creates a new error message with a detailed error message. The +// message will be prefixed with the contents of the supplied error type. +// Additionally, more errors, that provide more context can be supplied which +// will be appended to the message. This makes use of Go 1.20's possibility to +// include more than one %w formatting directive in [fmt.Errorf]. +// +// For example, +// +// newError("no keyfunc was provided", ErrTokenUnverifiable) +// +// will produce the error string +// +// "token is unverifiable: no keyfunc was provided" +func newError(message string, err error, more ...error) error { + var format string + var args []any + if message != "" { + format = "%w: %s" + args = []any{err, message} + } else { + format = "%w" + args = []any{err} + } + + for _, e := range more { + format += ": %w" + args = append(args, e) + } + + err = fmt.Errorf(format, args...) + return err +} diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go b/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go new file mode 100644 index 00000000..2ad542f0 --- /dev/null +++ b/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go @@ -0,0 +1,78 @@ +//go:build !go1.20 +// +build !go1.20 + +package jwt + +import ( + "errors" + "fmt" +) + +// Is implements checking for multiple errors using [errors.Is], since multiple +// error unwrapping is not possible in versions less than Go 1.20. +func (je joinedError) Is(err error) bool { + for _, e := range je.errs { + if errors.Is(e, err) { + return true + } + } + + return false +} + +// wrappedErrors is a workaround for wrapping multiple errors in environments +// where Go 1.20 is not available. It basically uses the already implemented +// functionality of joinedError to handle multiple errors with supplies a +// custom error message that is identical to the one we produce in Go 1.20 using +// multiple %w directives. +type wrappedErrors struct { + msg string + joinedError +} + +// Error returns the stored error string +func (we wrappedErrors) Error() string { + return we.msg +} + +// newError creates a new error message with a detailed error message. The +// message will be prefixed with the contents of the supplied error type. +// Additionally, more errors, that provide more context can be supplied which +// will be appended to the message. Since we cannot use of Go 1.20's possibility +// to include more than one %w formatting directive in [fmt.Errorf], we have to +// emulate that. 
+// +// For example, +// +// newError("no keyfunc was provided", ErrTokenUnverifiable) +// +// will produce the error string +// +// "token is unverifiable: no keyfunc was provided" +func newError(message string, err error, more ...error) error { + // We cannot wrap multiple errors here with %w, so we have to be a little + // bit creative. Basically, we are using %s instead of %w to produce the + // same error message and then throw the result into a custom error struct. + var format string + var args []any + if message != "" { + format = "%s: %s" + args = []any{err, message} + } else { + format = "%s" + args = []any{err} + } + errs := []error{err} + + for _, e := range more { + format += ": %s" + args = append(args, e) + errs = append(errs, e) + } + + err = &wrappedErrors{ + msg: fmt.Sprintf(format, args...), + joinedError: joinedError{errs: errs}, + } + return err +} diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go index f17590cc..7c216ae0 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go @@ -1,3 +1,6 @@ +//go:build go1.4 +// +build go1.4 + package jwt import ( diff --git a/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go b/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go index c6ffa437..ab618acc 100644 --- a/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go +++ b/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go @@ -78,16 +78,6 @@ func (s *escSeqParser) Consume(char rune) { if s.inEscSeq { s.escapeSeq += string(char) - // --- FIX for OSC 8 hyperlinks (e.g. \x1b]8;;url\x07label\x1b]8;;\x07) - if s.escSeqKind == escSeqKindOSI && - strings.HasPrefix(s.escapeSeq, escapeStartConcealOSI) && - char == '\a' { // BEL - - s.ParseSeq(s.escapeSeq, s.escSeqKind) - s.Reset() - return - } - if s.isEscapeStopRune(char) { s.ParseSeq(s.escapeSeq, s.escSeqKind) s.Reset() diff --git a/vendor/github.com/juju/clock/.gitignore b/vendor/github.com/juju/clock/.gitignore new file mode 100644 index 00000000..1d74e219 --- /dev/null +++ b/vendor/github.com/juju/clock/.gitignore @@ -0,0 +1 @@ +.vscode/ diff --git a/vendor/github.com/juju/clock/LICENSE b/vendor/github.com/juju/clock/LICENSE new file mode 100644 index 00000000..ade9307b --- /dev/null +++ b/vendor/github.com/juju/clock/LICENSE @@ -0,0 +1,191 @@ +All files in this repository are licensed as follows. If you contribute +to this repository, it is assumed that you license your contribution +under the same license unless you state otherwise. + +All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file. + +This software is licensed under the LGPLv3, included below. + +As a special exception to the GNU Lesser General Public License version 3 +("LGPL3"), the copyright holders of this Library give you permission to +convey to a third party a Combined Work that links statically or dynamically +to this Library without providing any Minimal Corresponding Source or +Minimal Application Code as set out in 4d or providing the installation +information set out in section 4e, provided that you comply with the other +provisions of LGPL3 and provided that you meet, for the Application the +terms and conditions of the license(s) which apply to the Application. + +Except as stated in this special exception, the provisions of LGPL3 will +continue to comply in full to this Library. 
If you modify this Library, you +may apply this exception to your version of this Library, but you are not +obliged to do so. If you do not wish to do so, delete this exception +statement from your version. This exception does not (and cannot) modify any +license terms which apply to the Application, with which you must still +comply. + + + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. 
You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. 
Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/vendor/github.com/juju/clock/Makefile b/vendor/github.com/juju/clock/Makefile new file mode 100644 index 00000000..900ccf75 --- /dev/null +++ b/vendor/github.com/juju/clock/Makefile @@ -0,0 +1,20 @@ +PROJECT := github.com/juju/clock + +.PHONY: check-licence check-go check + +check: check-licence check-go + go test $(PROJECT)/... + +check-licence: + @(fgrep -rl "Licensed under the LGPLv3" --exclude *.s .;\ + fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" --exclude *.s .;\ + find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ + xargs -I {} echo FAIL: licence missed: {} + +check-go: + $(eval GOFMT := $(strip $(shell gofmt -l .| sed -e "s/^/ /g"))) + @(if [ x$(GOFMT) != x"" ]; then \ + echo go fmt is sad: $(GOFMT); \ + exit 1; \ + fi ) + @(go vet -all -composites=false -copylocks=false .) diff --git a/vendor/github.com/juju/clock/README.md b/vendor/github.com/juju/clock/README.md new file mode 100644 index 00000000..a5ac464d --- /dev/null +++ b/vendor/github.com/juju/clock/README.md @@ -0,0 +1,7 @@ +# clock + +An interface definition for a fully defined clock. + +An WallClock implementation of that interface using the time package. + +A testing clock. diff --git a/vendor/github.com/juju/clock/clock.go b/vendor/github.com/juju/clock/clock.go new file mode 100644 index 00000000..6303cf65 --- /dev/null +++ b/vendor/github.com/juju/clock/clock.go @@ -0,0 +1,77 @@ +// Copyright 2015 Canonical Ltd. +// Licensed under the LGPLv3, see LICENCE file for details. + +package clock + +import "time" + +// Clock provides an interface for dealing with clocks. +type Clock interface { + // Now returns the current clock time. + Now() time.Time + + // After waits for the duration to elapse and then sends the + // current time on the returned channel. + After(time.Duration) <-chan time.Time + + // AfterFunc waits for the duration to elapse and then calls f in its own goroutine. + // It returns a Timer that can be used to cancel the call using its Stop method. + AfterFunc(d time.Duration, f func()) Timer + + // NewTimer creates a new Timer that will send the current time + // on its channel after at least duration d. + NewTimer(d time.Duration) Timer + + // At waits for the time to pass and then sends the + // current time on the returned channel. 
+ At(t time.Time) <-chan time.Time + + // AtFunc waits for the time to pass and then calls f in its own goroutine. + // It returns an Alarm that can be used to cancel the call using its Stop method. + AtFunc(t time.Time, f func()) Alarm + + // NewAlarm creates a new Alarm that will send the current time + // on its channel at or after time t has passed. + NewAlarm(t time.Time) Alarm +} + +// Timer type represents a single event. +// Timers must be created with AfterFunc or NewTimer. +// This interface follows time.Timer's methods but provides easier mocking. +type Timer interface { + // When the timer expires, the current time will be sent on the + // channel returned from Chan, unless the timer was created by + // AfterFunc. + Chan() <-chan time.Time + + // Reset changes the timer to expire after duration d. + // It returns true if the timer had been active, false if + // the timer had expired or been stopped. + Reset(d time.Duration) bool + + // Stop prevents the Timer from firing. It returns true if + // the call stops the timer, false if the timer has already expired or been stopped. + // Stop does not close the channel, to prevent a read + // from the channel succeeding incorrectly. + Stop() bool +} + +// Alarm type represents a single event. +// Alarms must be created with AtFunc or NewAlarm. +type Alarm interface { + // When the alarm expires, the current time will be sent on the + // channel returned from Chan, unless the alarm was created by + // AtFunc. + Chan() <-chan time.Time + + // Reset changes the alarm to expire at or after time t. + // It returns true if the alarm had been active, false if + // the alarm had fired or been stopped. + Reset(t time.Time) bool + + // Stop prevents the alarm from firing. It returns true if + // the call stops the alarm, false if the alarm has already fired or been stopped. + // Stop does not close the channel, to prevent a read + // from the channel succeeding incorrectly. + Stop() bool +} diff --git a/vendor/github.com/juju/clock/wall.go b/vendor/github.com/juju/clock/wall.go new file mode 100644 index 00000000..1a4b021e --- /dev/null +++ b/vendor/github.com/juju/clock/wall.go @@ -0,0 +1,77 @@ +// Copyright 2015 Canonical Ltd. +// Licensed under the LGPLv3, see LICENCE file for details. + +package clock + +import ( + "time" +) + +// WallClock exposes wall-clock time via the Clock interface. +var WallClock wallClock + +// ensure that WallClock does actually implement the Clock interface. +var _ Clock = WallClock + +// WallClock exposes wall-clock time as returned by time.Now. +type wallClock struct{} + +// Now is part of the Clock interface. +func (wallClock) Now() time.Time { + return time.Now() +} + +// After implements Clock.After. +func (wallClock) After(d time.Duration) <-chan time.Time { + return time.After(d) +} + +// AfterFunc implements Clock.AfterFunc. +func (wallClock) AfterFunc(d time.Duration, f func()) Timer { + return wallTimer{time.AfterFunc(d, f)} +} + +// NewTimer implements Clock.NewTimer. +func (wallClock) NewTimer(d time.Duration) Timer { + return wallTimer{time.NewTimer(d)} +} + +// wallTimer implements the Timer interface. +type wallTimer struct { + *time.Timer +} + +// Chan implements Timer.Chan. +func (t wallTimer) Chan() <-chan time.Time { + return t.C +} + +// At implements Clock.At. +func (wallClock) At(t time.Time) <-chan time.Time { + return time.After(time.Until(t)) +} + +// AtFunc implements Clock.AtFunc. 
+func (wallClock) AtFunc(t time.Time, f func()) Alarm { + return wallAlarm{time.AfterFunc(time.Until(t), f)} +} + +// NewAlarm implements Clock.NewAlarm. +func (wallClock) NewAlarm(t time.Time) Alarm { + return wallAlarm{time.NewTimer(time.Until(t))} +} + +// wallAlarm implements the Alarm interface. +type wallAlarm struct { + *time.Timer +} + +// Chan implements Alarm.Chan. +func (a wallAlarm) Chan() <-chan time.Time { + return a.C +} + +// Reset implements Alarm.Reset +func (a wallAlarm) Reset(t time.Time) bool { + return a.Timer.Reset(time.Until(t)) +} diff --git a/vendor/github.com/juju/errors/.gitignore b/vendor/github.com/juju/errors/.gitignore new file mode 100644 index 00000000..83656241 --- /dev/null +++ b/vendor/github.com/juju/errors/.gitignore @@ -0,0 +1,23 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test diff --git a/vendor/github.com/juju/errors/LICENSE b/vendor/github.com/juju/errors/LICENSE new file mode 100644 index 00000000..ade9307b --- /dev/null +++ b/vendor/github.com/juju/errors/LICENSE @@ -0,0 +1,191 @@ +All files in this repository are licensed as follows. If you contribute +to this repository, it is assumed that you license your contribution +under the same license unless you state otherwise. + +All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file. + +This software is licensed under the LGPLv3, included below. + +As a special exception to the GNU Lesser General Public License version 3 +("LGPL3"), the copyright holders of this Library give you permission to +convey to a third party a Combined Work that links statically or dynamically +to this Library without providing any Minimal Corresponding Source or +Minimal Application Code as set out in 4d or providing the installation +information set out in section 4e, provided that you comply with the other +provisions of LGPL3 and provided that you meet, for the Application the +terms and conditions of the license(s) which apply to the Application. + +Except as stated in this special exception, the provisions of LGPL3 will +continue to comply in full to this Library. If you modify this Library, you +may apply this exception to your version of this Library, but you are not +obliged to do so. If you do not wish to do so, delete this exception +statement from your version. This exception does not (and cannot) modify any +license terms which apply to the Application, with which you must still +comply. + + + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. 
+ + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. 
+ + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
diff --git a/vendor/github.com/juju/errors/Makefile b/vendor/github.com/juju/errors/Makefile new file mode 100644 index 00000000..a5bc81e6 --- /dev/null +++ b/vendor/github.com/juju/errors/Makefile @@ -0,0 +1,24 @@ +PROJECT := github.com/juju/errors + +.PHONY: check-licence check-go check docs + +check: check-licence check-go + go test $(PROJECT)/... + +check-licence: + @(fgrep -rl "Licensed under the LGPLv3" --exclude *.s .;\ + fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" --exclude *.s .;\ + find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ + xargs -I {} echo FAIL: licence missed: {} + +check-go: + $(eval GOFMT := $(strip $(shell gofmt -l .| sed -e "s/^/ /g"))) + @(if [ x$(GOFMT) != x"" ]; then \ + echo go fmt is sad: $(GOFMT); \ + exit 1; \ + fi ) + @(go vet -all -composites=false -copylocks=false .) + +docs: + godoc2md github.com/juju/errors > README.md + sed -i '5i[\[GoDoc](https://godoc.org/github.com/juju/errors?status.svg)](https://godoc.org/github.com/juju/errors)' README.md diff --git a/vendor/github.com/juju/errors/README.md b/vendor/github.com/juju/errors/README.md new file mode 100644 index 00000000..271aa78c --- /dev/null +++ b/vendor/github.com/juju/errors/README.md @@ -0,0 +1,707 @@ + +# errors + import "github.com/juju/errors" + +[![GoDoc](https://godoc.org/github.com/juju/errors?status.svg)](https://godoc.org/github.com/juju/errors) + +The juju/errors provides an easy way to annotate errors without losing the +original error context. + +The exported `New` and `Errorf` functions are designed to replace the +`errors.New` and `fmt.Errorf` functions respectively. The same underlying +error is there, but the package also records the location at which the error +was created. + +A primary use case for this library is to add extra context any time an +error is returned from a function. + + + if err := SomeFunc(); err != nil { + return err + } + +This instead becomes: + + + if err := SomeFunc(); err != nil { + return errors.Trace(err) + } + +which just records the file and line number of the Trace call, or + + + if err := SomeFunc(); err != nil { + return errors.Annotate(err, "more context") + } + +which also adds an annotation to the error. + +When you want to check to see if an error is of a particular type, a helper +function is normally exported by the package that returned the error, like the +`os` package does. The underlying cause of the error is available using the +`Cause` function. + + + os.IsNotExist(errors.Cause(err)) + +The result of the `Error()` call on an annotated error is the annotations joined +with colons, then the result of the `Error()` method for the underlying error +that was the cause. + + + err := errors.Errorf("original") + err = errors.Annotatef(err, "context") + err = errors.Annotatef(err, "more context") + err.Error() -> "more context: context: original" + +Obviously recording the file, line and functions is not very useful if you +cannot get them back out again. + + + errors.ErrorStack(err) + +will return something like: + + + first error + github.com/juju/errors/annotation_test.go:193: + github.com/juju/errors/annotation_test.go:194: annotation + github.com/juju/errors/annotation_test.go:195: + github.com/juju/errors/annotation_test.go:196: more context + github.com/juju/errors/annotation_test.go:197: + +The first error was generated by an external system, so there was no location +associated. The second, fourth, and last lines were generated with Trace calls, +and the other two through Annotate. 
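A compact, self-contained sketch of the Trace/Annotate flow just described; loadConfig and the config path are illustrative, and only the errors calls come from this package:

```go
package main

import (
	"fmt"

	"github.com/juju/errors"
)

// loadConfig is an illustrative helper; errors.New records the
// location here, and Annotatef adds context plus another location.
func loadConfig(path string) error {
	err := errors.New("file not found")
	return errors.Annotatef(err, "reading %q", path)
}

func main() {
	if err := loadConfig("/etc/app.conf"); err != nil {
		// Annotations are joined with colons, most recent first.
		fmt.Println(err) // reading "/etc/app.conf": file not found
		// ErrorStack prints one line per recorded location.
		fmt.Println(errors.ErrorStack(err))
	}
}
```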
+ +Sometimes when responding to an error you want to return a more specific error +for the situation. + + + if err := FindField(field); err != nil { + return errors.Wrap(err, errors.NotFoundf(field)) + } + +This returns an error where the complete error stack is still available, and +`errors.Cause()` will return the `NotFound` error. + + + + + + +## func AlreadyExistsf +``` go +func AlreadyExistsf(format string, args ...interface{}) error +``` +AlreadyExistsf returns an error which satisfies IsAlreadyExists(). + + +## func Annotate +``` go +func Annotate(other error, message string) error +``` +Annotate is used to add extra context to an existing error. The location of +the Annotate call is recorded with the annotations. The file, line and +function are also recorded. + +For example: + + + if err := SomeFunc(); err != nil { + return errors.Annotate(err, "failed to frombulate") + } + + +## func Annotatef +``` go +func Annotatef(other error, format string, args ...interface{}) error +``` +Annotatef is used to add extra context to an existing error. The location of +the Annotate call is recorded with the annotations. The file, line and +function are also recorded. + +For example: + + + if err := SomeFunc(); err != nil { + return errors.Annotatef(err, "failed to frombulate the %s", arg) + } + + +## func BadRequestf +``` go +func BadRequestf(format string, args ...interface{}) error +``` +BadRequestf returns an error which satisfies IsBadRequest(). + + +## func Cause +``` go +func Cause(err error) error +``` +Cause returns the cause of the given error. This will be either the +original error, or the result of a Wrap or Mask call. + +Cause is the usual way to diagnose errors that may have been wrapped by +the other errors functions. + + +## func DeferredAnnotatef +``` go +func DeferredAnnotatef(err *error, format string, args ...interface{}) +``` +DeferredAnnotatef annotates the given error (when it is not nil) with the given +format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef +does nothing. This method is used in a defer statement in order to annotate any +resulting error with the same message. + +For example: + + + defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg) + + +## func Details +``` go +func Details(err error) string +``` +Details returns information about the stack of errors wrapped by err, in +the format: + + + [{filename:99: error one} {otherfile:55: cause of error one}] + +This is a terse alternative to ErrorStack as it returns a single line. + + +## func ErrorStack +``` go +func ErrorStack(err error) string +``` +ErrorStack returns a string representation of the annotated error. If the +error passed as the parameter is not an annotated error, the result is +simply the result of the Error() method on that error. + +If the error is an annotated error, a multi-line string is returned where +each line represents one entry in the annotation stack. The full filename +from the call stack is used in the output. + + + first error + github.com/juju/errors/annotation_test.go:193: + github.com/juju/errors/annotation_test.go:194: annotation + github.com/juju/errors/annotation_test.go:195: + github.com/juju/errors/annotation_test.go:196: more context + github.com/juju/errors/annotation_test.go:197: + + +## func Errorf +``` go +func Errorf(format string, args ...interface{}) error +``` +Errorf creates a new annotated error and records the location that the +error is created. This should be a drop in replacement for fmt.Errorf. 
+ +For example: + + + return errors.Errorf("validation failed: %s", message) + + +## func Forbiddenf +``` go +func Forbiddenf(format string, args ...interface{}) error +``` +Forbiddenf returns an error which satistifes IsForbidden() + + +## func IsAlreadyExists +``` go +func IsAlreadyExists(err error) bool +``` +IsAlreadyExists reports whether the error was created with +AlreadyExistsf() or NewAlreadyExists(). + + +## func IsBadRequest +``` go +func IsBadRequest(err error) bool +``` +IsBadRequest reports whether err was created with BadRequestf() or +NewBadRequest(). + + +## func IsForbidden +``` go +func IsForbidden(err error) bool +``` +IsForbidden reports whether err was created with Forbiddenf() or +NewForbidden(). + + +## func IsMethodNotAllowed +``` go +func IsMethodNotAllowed(err error) bool +``` +IsMethodNotAllowed reports whether err was created with MethodNotAllowedf() or +NewMethodNotAllowed(). + + +## func IsNotAssigned +``` go +func IsNotAssigned(err error) bool +``` +IsNotAssigned reports whether err was created with NotAssignedf() or +NewNotAssigned(). + + +## func IsNotFound +``` go +func IsNotFound(err error) bool +``` +IsNotFound reports whether err was created with NotFoundf() or +NewNotFound(). + + +## func IsNotImplemented +``` go +func IsNotImplemented(err error) bool +``` +IsNotImplemented reports whether err was created with +NotImplementedf() or NewNotImplemented(). + + +## func IsNotProvisioned +``` go +func IsNotProvisioned(err error) bool +``` +IsNotProvisioned reports whether err was created with NotProvisionedf() or +NewNotProvisioned(). + + +## func IsNotSupported +``` go +func IsNotSupported(err error) bool +``` +IsNotSupported reports whether the error was created with +NotSupportedf() or NewNotSupported(). + + +## func IsNotValid +``` go +func IsNotValid(err error) bool +``` +IsNotValid reports whether the error was created with NotValidf() or +NewNotValid(). + + +## func IsUnauthorized +``` go +func IsUnauthorized(err error) bool +``` +IsUnauthorized reports whether err was created with Unauthorizedf() or +NewUnauthorized(). + + +## func IsUserNotFound +``` go +func IsUserNotFound(err error) bool +``` +IsUserNotFound reports whether err was created with UserNotFoundf() or +NewUserNotFound(). + + +## func Mask +``` go +func Mask(other error) error +``` +Mask hides the underlying error type, and records the location of the masking. + + +## func Maskf +``` go +func Maskf(other error, format string, args ...interface{}) error +``` +Mask masks the given error with the given format string and arguments (like +fmt.Sprintf), returning a new error that maintains the error stack, but +hides the underlying error type. The error string still contains the full +annotations. If you want to hide the annotations, call Wrap. + + +## func MethodNotAllowedf +``` go +func MethodNotAllowedf(format string, args ...interface{}) error +``` +MethodNotAllowedf returns an error which satisfies IsMethodNotAllowed(). + + +## func New +``` go +func New(message string) error +``` +New is a drop in replacement for the standard library errors module that records +the location that the error is created. + +For example: + + + return errors.New("validation failed") + + +## func NewAlreadyExists +``` go +func NewAlreadyExists(err error, msg string) error +``` +NewAlreadyExists returns an error which wraps err and satisfies +IsAlreadyExists(). 
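The constructor and predicate pairs above (AlreadyExistsf with IsAlreadyExists, and so on) give callers a typed taxonomy to branch on instead of matching error strings. A short sketch under that assumption; createUser and its lookup are illustrative:

```go
package main

import (
	"fmt"

	"github.com/juju/errors"
)

// createUser is illustrative; it returns a typed error when the
// name is taken, so callers need no string comparisons.
func createUser(name string) error {
	taken := name == "admin" // stand-in for a real lookup
	if taken {
		return errors.AlreadyExistsf("user %q", name)
	}
	return nil
}

func main() {
	err := createUser("admin")
	if errors.IsAlreadyExists(err) {
		fmt.Println("pick another name:", err)
	}
}
```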
+ + +## func NewBadRequest +``` go +func NewBadRequest(err error, msg string) error +``` +NewBadRequest returns an error which wraps err that satisfies +IsBadRequest(). + + +## func NewForbidden +``` go +func NewForbidden(err error, msg string) error +``` +NewForbidden returns an error which wraps err that satisfies +IsForbidden(). + + +## func NewMethodNotAllowed +``` go +func NewMethodNotAllowed(err error, msg string) error +``` +NewMethodNotAllowed returns an error which wraps err that satisfies +IsMethodNotAllowed(). + + +## func NewNotAssigned +``` go +func NewNotAssigned(err error, msg string) error +``` +NewNotAssigned returns an error which wraps err that satisfies +IsNotAssigned(). + + +## func NewNotFound +``` go +func NewNotFound(err error, msg string) error +``` +NewNotFound returns an error which wraps err that satisfies +IsNotFound(). + + +## func NewNotImplemented +``` go +func NewNotImplemented(err error, msg string) error +``` +NewNotImplemented returns an error which wraps err and satisfies +IsNotImplemented(). + + +## func NewNotProvisioned +``` go +func NewNotProvisioned(err error, msg string) error +``` +NewNotProvisioned returns an error which wraps err that satisfies +IsNotProvisioned(). + + +## func NewNotSupported +``` go +func NewNotSupported(err error, msg string) error +``` +NewNotSupported returns an error which wraps err and satisfies +IsNotSupported(). + + +## func NewNotValid +``` go +func NewNotValid(err error, msg string) error +``` +NewNotValid returns an error which wraps err and satisfies IsNotValid(). + + +## func NewUnauthorized +``` go +func NewUnauthorized(err error, msg string) error +``` +NewUnauthorized returns an error which wraps err and satisfies +IsUnauthorized(). + + +## func NewUserNotFound +``` go +func NewUserNotFound(err error, msg string) error +``` +NewUserNotFound returns an error which wraps err and satisfies +IsUserNotFound(). + + +## func NotAssignedf +``` go +func NotAssignedf(format string, args ...interface{}) error +``` +NotAssignedf returns an error which satisfies IsNotAssigned(). + + +## func NotFoundf +``` go +func NotFoundf(format string, args ...interface{}) error +``` +NotFoundf returns an error which satisfies IsNotFound(). + + +## func NotImplementedf +``` go +func NotImplementedf(format string, args ...interface{}) error +``` +NotImplementedf returns an error which satisfies IsNotImplemented(). + + +## func NotProvisionedf +``` go +func NotProvisionedf(format string, args ...interface{}) error +``` +NotProvisionedf returns an error which satisfies IsNotProvisioned(). + + +## func NotSupportedf +``` go +func NotSupportedf(format string, args ...interface{}) error +``` +NotSupportedf returns an error which satisfies IsNotSupported(). + + +## func NotValidf +``` go +func NotValidf(format string, args ...interface{}) error +``` +NotValidf returns an error which satisfies IsNotValid(). + + +## func Trace +``` go +func Trace(other error) error +``` +Trace adds the location of the Trace call to the stack. The Cause of the +resulting error is the same as the error parameter. If the other error is +nil, the result will be nil. + +For example: + + + if err := SomeFunc(); err != nil { + return errors.Trace(err) + } + + +## func Unauthorizedf +``` go +func Unauthorizedf(format string, args ...interface{}) error +``` +Unauthorizedf returns an error which satisfies IsUnauthorized(). 
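Because Trace leaves the Cause intact, the usual standard-library predicates keep working after unwrapping, exactly as the os.IsNotExist example earlier in this README suggests. A runnable sketch of that round trip:

```go
package main

import (
	"fmt"
	"os"

	"github.com/juju/errors"
)

func main() {
	_, err := os.Open("/no/such/file")
	if err != nil {
		// Trace records this file and line but does not change
		// the Cause, so stdlib checks still apply after Cause().
		err = errors.Trace(err)
	}
	fmt.Println(os.IsNotExist(errors.Cause(err))) // true
}
```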
+ + +## func UserNotFoundf +``` go +func UserNotFoundf(format string, args ...interface{}) error +``` +UserNotFoundf returns an error which satisfies IsUserNotFound(). + + +## func Wrap +``` go +func Wrap(other, newDescriptive error) error +``` +Wrap changes the Cause of the error. The location of the Wrap call is also +stored in the error stack. + +For example: + + + if err := SomeFunc(); err != nil { + newErr := &packageError{"more context", private_value} + return errors.Wrap(err, newErr) + } + + +## func Wrapf +``` go +func Wrapf(other, newDescriptive error, format string, args ...interface{}) error +``` +Wrapf changes the Cause of the error, and adds an annotation. The location +of the Wrap call is also stored in the error stack. + +For example: + + + if err := SomeFunc(); err != nil { + return errors.Wrapf(err, simpleErrorType, "invalid value %q", value) + } + + + +## type Err +``` go +type Err struct { + // contains filtered or unexported fields +} +``` +Err holds a description of an error along with information about +where the error was created. + +It may be embedded in custom error types to add extra information that +this errors package can understand. + + + + + + + + + +### func NewErr +``` go +func NewErr(format string, args ...interface{}) Err +``` +NewErr is used to return an Err for the purpose of embedding in other +structures. The location is not specified, and needs to be set with a call +to SetLocation. + +For example: + + + type FooError struct { + errors.Err + code int + } + + func NewFooError(code int) error { + err := &FooError{errors.NewErr("foo"), code} + err.SetLocation(1) + return err + } + + +### func NewErrWithCause +``` go +func NewErrWithCause(other error, format string, args ...interface{}) Err +``` +NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other +structures. The location is not specified, and needs to be set with a call +to SetLocation. + +For example: + + + type FooError struct { + errors.Err + code int + } + + func (e *FooError) Annotate(format string, args ...interface{}) error { + err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code} + err.SetLocation(1) + return err + }) + + + + +### func (\*Err) Cause +``` go +func (e *Err) Cause() error +``` +The Cause of an error is the most recent error in the error stack that +meets one of these criteria: the original error that was raised; the new +error that was passed into the Wrap function; the most recently masked +error; or nil if the error itself is considered the Cause. Normally this +method is not invoked directly, but instead through the Cause stand alone +function. + + + +### func (\*Err) Error +``` go +func (e *Err) Error() string +``` +Error implements error.Error. + + + +### func (\*Err) Format +``` go +func (e *Err) Format(s fmt.State, verb rune) +``` +Format implements fmt.Formatter +When printing errors with %+v it also prints the stack trace. +%#v unsurprisingly will print the real underlying type. + + + +### func (\*Err) Location +``` go +func (e *Err) Location() (filename string, line int) +``` +Location is the file and line of where the error was most recently +created or annotated. + + + +### func (\*Err) Message +``` go +func (e *Err) Message() string +``` +Message returns the message stored with the most recent location. This is +the empty string if the most recent call was Trace, or the message stored +with Annotate or Mask. 
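NewErr exists so downstream packages can embed Err in their own error types and still participate in location tracking; the FooError shape in the doc above is the canonical pattern. A sketch that fills it out into a runnable program; the code field and the "foo %d" message are illustrative:

```go
package main

import (
	"fmt"

	"github.com/juju/errors"
)

// FooError embeds errors.Err so it gains Error, Format, and the
// location machinery; code is an illustrative extra field.
type FooError struct {
	errors.Err
	code int
}

func NewFooError(code int) error {
	err := &FooError{errors.NewErr("foo %d", code), code}
	err.SetLocation(1) // record the caller, not this constructor
	return err
}

func main() {
	err := NewFooError(42)
	fmt.Println(err) // foo 42
}
```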
+ + + +### func (\*Err) SetLocation +``` go +func (e *Err) SetLocation(callDepth int) +``` +SetLocation records the source location of the error at callDepth stack +frames above the call. + + + +### func (\*Err) StackTrace +``` go +func (e *Err) StackTrace() []string +``` +StackTrace returns one string for each location recorded in the stack of +errors. The first value is the originating error, with a line for each +other annotation or tracing of the error. + + + +### func (\*Err) Underlying +``` go +func (e *Err) Underlying() error +``` +Underlying returns the previous error in the error stack, if any. A client +should not ever really call this method. It is used to build the error +stack and should not be introspected by client calls. Or more +specifically, clients should not depend on anything but the `Cause` of an +error. + + + + + + + + + +- - - +Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md) diff --git a/vendor/github.com/juju/errors/doc.go b/vendor/github.com/juju/errors/doc.go new file mode 100644 index 00000000..d4403662 --- /dev/null +++ b/vendor/github.com/juju/errors/doc.go @@ -0,0 +1,79 @@ +// Copyright 2013, 2014 Canonical Ltd. +// Licensed under the LGPLv3, see LICENCE file for details. + +/* +Package errors provides an easy way to annotate errors without losing the +original error context. + +The exported `New` and `Errorf` functions are designed to replace the +`errors.New` and `fmt.Errorf` functions respectively. The same underlying +error is there, but the package also records the location at which the error +was created. + +A primary use case for this library is to add extra context any time an +error is returned from a function. + + if err := SomeFunc(); err != nil { + return err + } + +This instead becomes: + + if err := SomeFunc(); err != nil { + return errors.Trace(err) + } + +which just records the file and line number of the Trace call, or + + if err := SomeFunc(); err != nil { + return errors.Annotate(err, "more context") + } + +which also adds an annotation to the error. + +When you want to check to see if an error is of a particular type, a helper +function is normally exported by the package that returned the error, like the +`os` package does. The underlying cause of the error is available using the +`Cause` function. + + os.IsNotExist(errors.Cause(err)) + +The result of the `Error()` call on an annotated error is the annotations joined +with colons, then the result of the `Error()` method for the underlying error +that was the cause. + + err := errors.Errorf("original") + err = errors.Annotatef(err, "context") + err = errors.Annotatef(err, "more context") + err.Error() -> "more context: context: original" + +Obviously recording the file, line and functions is not very useful if you +cannot get them back out again. + + errors.ErrorStack(err) + +will return something like: + + first error + github.com/juju/errors/annotation_test.go:193: + github.com/juju/errors/annotation_test.go:194: annotation + github.com/juju/errors/annotation_test.go:195: + github.com/juju/errors/annotation_test.go:196: more context + github.com/juju/errors/annotation_test.go:197: + +The first error was generated by an external system, so there was no location +associated. The second, fourth, and last lines were generated with Trace calls, +and the other two through Annotate. + +Sometimes when responding to an error you want to return a more specific error +for the situation. 
+ + if err := FindField(field); err != nil { + return errors.Wrap(err, errors.NotFoundf(field)) + } + +This returns an error where the complete error stack is still available, and +`errors.Cause()` will return the `NotFound` error. + +*/ +package errors diff --git a/vendor/github.com/juju/errors/error.go b/vendor/github.com/juju/errors/error.go new file mode 100644 index 00000000..326b917a --- /dev/null +++ b/vendor/github.com/juju/errors/error.go @@ -0,0 +1,227 @@ +// Copyright 2014 Canonical Ltd. +// Licensed under the LGPLv3, see LICENCE file for details. + +package errors + +import ( + "fmt" + "reflect" +) + +// Err holds a description of an error along with information about +// where the error was created. +// +// It may be embedded in custom error types to add extra information that +// this errors package can understand. +type Err struct { + // message holds an annotation of the error. + message string + + // cause holds the cause of the error as returned + // by the Cause method. + cause error + + // previous holds the previous error in the error stack, if any. + previous error + + // function is the package path-qualified function name where the + // error was created. + function string + + // line is the line number the error was created on inside of function + line int +} + +// Locationer is an interface that represents a certain class of errors that +// contain the location information from where they were raised. +type Locationer interface { + // Location returns the path-qualified function name where the error was + // created and the line number + Location() (function string, line int) +} + +// locationError is the internal implementation of the Locationer interface. +type locationError struct { + error + + // function is the package path-qualified function name where the + // error was created. + function string + + // line is the line number the error was created on inside of function + line int +} + +// newLocationError constructs a new Locationer error from the supplied error +// with the location set to callDepth in the stack. If a nill error is provided +// to this function then a new empty error is constructed. +func newLocationError(err error, callDepth int) *locationError { + le := &locationError{error: err} + le.function, le.line = getLocation(callDepth + 1) + return le +} + +// Error implementes the error interface. +func (l *locationError) Error() string { + if l.error == nil { + return "" + } + return l.error.Error() +} + +// *locationError implements Locationer.Location interface +func (l *locationError) Location() (string, int) { + return l.function, l.line +} + +func (l *locationError) Unwrap() error { + return l.error +} + +// NewErr is used to return an Err for the purpose of embedding in other +// structures. The location is not specified, and needs to be set with a call +// to SetLocation. +// +// For example: +// type FooError struct { +// errors.Err +// code int +// } +// +// func NewFooError(code int) error { +// err := &FooError{errors.NewErr("foo"), code} +// err.SetLocation(1) +// return err +// } +func NewErr(format string, args ...interface{}) Err { + return Err{ + message: fmt.Sprintf(format, args...), + } +} + +// NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other +// structures. The location is not specified, and needs to be set with a call +// to SetLocation. 
+// +// For example: +// type FooError struct { +// errors.Err +// code int +// } +// +// func (e *FooError) Annotate(format string, args ...interface{}) error { +// err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code} +// err.SetLocation(1) +// return err +// }) +func NewErrWithCause(other error, format string, args ...interface{}) Err { + return Err{ + message: fmt.Sprintf(format, args...), + cause: Cause(other), + previous: other, + } +} + +// Location returns the package path-qualified function name and line of where +// the error was most recently created or annotated. +func (e *Err) Location() (function string, line int) { + return e.function, e.line +} + +// Underlying returns the previous error in the error stack, if any. A client +// should not ever really call this method. It is used to build the error +// stack and should not be introspected by client calls. Or more +// specifically, clients should not depend on anything but the `Cause` of an +// error. +func (e *Err) Underlying() error { + return e.previous +} + +// Cause returns the most recent error in the error stack that +// meets one of these criteria: the original error that was raised; the new +// error that was passed into the Wrap function; the most recently masked +// error; or nil if the error itself is considered the Cause. Normally this +// method is not invoked directly, but instead through the Cause stand alone +// function. +func (e *Err) Cause() error { + return e.cause +} + +// Message returns the message stored with the most recent location. This is +// the empty string if the most recent call was Trace, or the message stored +// with Annotate or Mask. +func (e *Err) Message() string { + return e.message +} + +// Error implements error.Error. +func (e *Err) Error() string { + // We want to walk up the stack of errors showing the annotations + // as long as the cause is the same. + err := e.previous + if !sameError(Cause(err), e.cause) && e.cause != nil { + err = e.cause + } + switch { + case err == nil: + return e.message + case e.message == "": + return err.Error() + } + return fmt.Sprintf("%s: %v", e.message, err) +} + +// Format implements fmt.Formatter +// When printing errors with %+v it also prints the stack trace. +// %#v unsurprisingly will print the real underlying type. +func (e *Err) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case s.Flag('+'): + fmt.Fprintf(s, "%s", ErrorStack(e)) + return + case s.Flag('#'): + // avoid infinite recursion by wrapping e into a type + // that doesn't implement Formatter. + fmt.Fprintf(s, "%#v", (*unformatter)(e)) + return + } + fallthrough + case 's': + fmt.Fprintf(s, "%s", e.Error()) + case 'q': + fmt.Fprintf(s, "%q", e.Error()) + default: + fmt.Fprintf(s, "%%!%c(%T=%s)", verb, e, e.Error()) + } +} + +// helper for Format +type unformatter Err + +func (unformatter) Format() { /* break the fmt.Formatter interface */ } + +// SetLocation records the package path-qualified function name of the error at +// callDepth stack frames above the call. +func (e *Err) SetLocation(callDepth int) { + e.function, e.line = getLocation(callDepth + 1) +} + +// StackTrace returns one string for each location recorded in the stack of +// errors. The first value is the originating error, with a line for each +// other annotation or tracing of the error. +func (e *Err) StackTrace() []string { + return errorStack(e) +} + +// Ideally we'd have a way to check identity, but deep equals will do. 
+func sameError(e1, e2 error) bool {
+	return reflect.DeepEqual(e1, e2)
+}
+
+// Unwrap is a synonym for Underlying, which allows Err to be used with the
+// Unwrap, Is and As functions in Go's standard `errors` library.
+func (e *Err) Unwrap() error {
+	return e.previous
+}
diff --git a/vendor/github.com/juju/errors/errortypes.go b/vendor/github.com/juju/errors/errortypes.go
new file mode 100644
index 00000000..0029f91d
--- /dev/null
+++ b/vendor/github.com/juju/errors/errortypes.go
@@ -0,0 +1,473 @@
+// Copyright 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+	"errors"
+	stderror "errors"
+	"fmt"
+	"strings"
+)
+
+// a ConstError is a prototype for a certain type of error
+type ConstError string
+
+// ConstError implements error
+func (e ConstError) Error() string {
+	return string(e)
+}
+
+// Different types of errors
+const (
+	// Timeout represents an error on timeout.
+	Timeout = ConstError("timeout")
+	// NotFound represents an error when something has not been found.
+	NotFound = ConstError("not found")
+	// UserNotFound represents an error when a non-existent user is looked up.
+	UserNotFound = ConstError("user not found")
+	// Unauthorized represents an error when an operation is unauthorized.
+	Unauthorized = ConstError("unauthorized")
+	// NotImplemented represents an error when something is not
+	// implemented.
+	NotImplemented = ConstError("not implemented")
+	// AlreadyExists represents an error when something already exists.
+	AlreadyExists = ConstError("already exists")
+	// NotSupported represents an error when something is not supported.
+	NotSupported = ConstError("not supported")
+	// NotValid represents an error when something is not valid.
+	NotValid = ConstError("not valid")
+	// NotProvisioned represents an error when something is not yet provisioned.
+	NotProvisioned = ConstError("not provisioned")
+	// NotAssigned represents an error when something is not yet assigned to
+	// something else.
+	NotAssigned = ConstError("not assigned")
+	// BadRequest represents an error when a request has bad parameters.
+	BadRequest = ConstError("bad request")
+	// MethodNotAllowed represents an error when an HTTP request
+	// is made with an inappropriate method.
+	MethodNotAllowed = ConstError("method not allowed")
+	// Forbidden represents an error when a request cannot be completed because of
+	// missing privileges.
+	Forbidden = ConstError("forbidden")
+	// QuotaLimitExceeded is emitted when an action failed due to a quota limit check.
+	QuotaLimitExceeded = ConstError("quota limit exceeded")
+	// NotYetAvailable is the error returned when a resource is not yet available
+	// but it might be in the future.
+ NotYetAvailable = ConstError("not yet available") +) + +// errWithType is an Err bundled with its error type (a ConstError) +type errWithType struct { + error + errType ConstError +} + +// Is compares `target` with e's error type +func (e *errWithType) Is(target error) bool { + if &e.errType == nil { + return false + } + return target == e.errType +} + +// Unwrap an errWithType gives the underlying Err +func (e *errWithType) Unwrap() error { + return e.error +} + +func wrapErrorWithMsg(err error, msg string) error { + if err == nil { + return stderror.New(msg) + } + if msg == "" { + return err + } + return fmt.Errorf("%s: %w", msg, err) +} + +func makeWrappedConstError(err error, format string, args ...interface{}) error { + separator := " " + if err.Error() == "" || errors.Is(err, &fmtNoop{}) { + separator = "" + } + return fmt.Errorf(strings.Join([]string{format, "%w"}, separator), append(args, err)...) +} + +// WithType is responsible for annotating an already existing error so that it +// also satisfies that of a ConstError. The resultant error returned should +// satisfy Is(err, errType). If err is nil then a nil error will also be returned. +// +// Now with Go's Is, As and Unwrap support it no longer makes sense to Wrap() +// 2 errors as both of those errors could be chains of errors in their own right. +// WithType aims to solve some of the usefulness of Wrap with the ability to +// make a pre-existing error also satisfy a ConstError type. +func WithType(err error, errType ConstError) error { + if err == nil { + return nil + } + return &errWithType{ + error: err, + errType: errType, + } +} + +// Timeoutf returns an error which satisfies Is(err, Timeout) and the Locationer +// interface. +func Timeoutf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(Timeout, format, args...), + 1, + ) +} + +// NewTimeout returns an error which wraps err and satisfies Is(err, Timeout) +// and the Locationer interface. +func NewTimeout(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: Timeout, + } +} + +// Deprecated: IsTimeout reports whether err is a Timeout error. Use +// Is(err, Timeout). +func IsTimeout(err error) bool { + return Is(err, Timeout) +} + +// NotFoundf returns an error which satisfies Is(err, NotFound) and the +// Locationer interface. +func NotFoundf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(NotFound, format, args...), + 1, + ) +} + +// NewNotFound returns an error which wraps err and satisfies Is(err, NotFound) +// and the Locationer interface. +func NewNotFound(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: NotFound, + } +} + +// Deprecated: IsNotFound reports whether err is a NotFound error. Use +// Is(err, NotFound). +func IsNotFound(err error) bool { + return Is(err, NotFound) +} + +// UserNotFoundf returns an error which satisfies Is(err, UserNotFound) and the +// Locationer interface. +func UserNotFoundf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(UserNotFound, format, args...), + 1, + ) +} + +// NewUserNotFound returns an error which wraps err and satisfies +// Is(err, UserNotFound) and the Locationer interface. 
+func NewUserNotFound(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: UserNotFound,
+	}
+}
+
+// Deprecated: IsUserNotFound reports whether err is a UserNotFound error. Use
+// Is(err, UserNotFound).
+func IsUserNotFound(err error) bool {
+	return Is(err, UserNotFound)
+}
+
+// Unauthorizedf returns an error that satisfies Is(err, Unauthorized) and
+// the Locationer interface.
+func Unauthorizedf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(Hide(Unauthorized), format, args...),
+		1,
+	)
+}
+
+// NewUnauthorized returns an error which wraps err and satisfies
+// Is(err, Unauthorized) and the Locationer interface.
+func NewUnauthorized(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: Unauthorized,
+	}
+}
+
+// Deprecated: IsUnauthorized reports whether err is an Unauthorized error. Use
+// Is(err, Unauthorized).
+func IsUnauthorized(err error) bool {
+	return Is(err, Unauthorized)
+}
+
+// NotImplementedf returns an error which satisfies Is(err, NotImplemented) and
+// the Locationer interface.
+func NotImplementedf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(NotImplemented, format, args...),
+		1,
+	)
+}
+
+// NewNotImplemented returns an error which wraps err and satisfies
+// Is(err, NotImplemented) and the Locationer interface.
+func NewNotImplemented(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: NotImplemented,
+	}
+}
+
+// Deprecated: IsNotImplemented reports whether err is a NotImplemented error.
+// Use Is(err, NotImplemented).
+func IsNotImplemented(err error) bool {
+	return Is(err, NotImplemented)
+}
+
+// AlreadyExistsf returns an error which satisfies Is(err, AlreadyExists) and
+// the Locationer interface.
+func AlreadyExistsf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(AlreadyExists, format, args...),
+		1,
+	)
+}
+
+// NewAlreadyExists returns an error which wraps err and satisfies
+// Is(err, AlreadyExists) and the Locationer interface.
+func NewAlreadyExists(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: AlreadyExists,
+	}
+}
+
+// Deprecated: IsAlreadyExists reports whether err is an AlreadyExists
+// error. Use Is(err, AlreadyExists).
+func IsAlreadyExists(err error) bool {
+	return Is(err, AlreadyExists)
+}
+
+// NotSupportedf returns an error which satisfies Is(err, NotSupported) and the
+// Locationer interface.
+func NotSupportedf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(NotSupported, format, args...),
+		1,
+	)
+}
+
+// NewNotSupported returns an error which wraps err and satisfies
+// Is(err, NotSupported) and the Locationer interface.
+func NewNotSupported(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: NotSupported,
+	}
+}
+
+// Deprecated: IsNotSupported reports whether err is a NotSupported error. Use
+// Is(err, NotSupported).
+func IsNotSupported(err error) bool {
+	return Is(err, NotSupported)
+}
+
+// NotValidf returns an error which satisfies Is(err, NotValid) and the
+// Locationer interface.
+func NotValidf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(NotValid, format, args...), + 1, + ) +} + +// NewNotValid returns an error which wraps err and satisfies Is(err, NotValid) +// and the Locationer interface. +func NewNotValid(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: NotValid, + } +} + +// Deprecated: IsNotValid reports whether err is a NotValid error. Use +// Is(err, NotValid). +func IsNotValid(err error) bool { + return Is(err, NotValid) +} + +// NotProvisionedf returns an error which satisfies Is(err, NotProvisioned) and +// the Locationer interface. +func NotProvisionedf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(NotProvisioned, format, args...), + 1, + ) +} + +// NewNotProvisioned returns an error which wraps err and satisfies +// Is(err, NotProvisioned) and the Locationer interface. +func NewNotProvisioned(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: NotProvisioned, + } +} + +// Deprecated: IsNotProvisioned reports whether err is a NotProvisioned error. +// Use Is(err, NotProvisioned). +func IsNotProvisioned(err error) bool { + return Is(err, NotProvisioned) +} + +// NotAssignedf returns an error which satisfies Is(err, NotAssigned) and the +// Locationer interface. +func NotAssignedf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(NotAssigned, format, args...), + 1, + ) +} + +// NewNotAssigned returns an error which wraps err and satisfies +// Is(err, NotAssigned) and the Locationer interface. +func NewNotAssigned(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: NotAssigned, + } +} + +// Deprecated: IsNotAssigned reports whether err is a NotAssigned error. +// Use Is(err, NotAssigned) +func IsNotAssigned(err error) bool { + return Is(err, NotAssigned) +} + +// BadRequestf returns an error which satisfies Is(err, BadRequest) and the +// Locationer interface. +func BadRequestf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(Hide(BadRequest), format, args...), + 1, + ) +} + +// NewBadRequest returns an error which wraps err and satisfies +// Is(err, BadRequest) and the Locationer interface. +func NewBadRequest(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: BadRequest, + } +} + +// Deprecated: IsBadRequest reports whether err is a BadRequest error. +// Use Is(err, BadRequest) +func IsBadRequest(err error) bool { + return Is(err, BadRequest) +} + +// MethodNotAllowedf returns an error which satisfies Is(err, MethodNotAllowed) +// and the Locationer interface. +func MethodNotAllowedf(format string, args ...interface{}) error { + return newLocationError( + makeWrappedConstError(Hide(MethodNotAllowed), format, args...), + 1, + ) +} + +// NewMethodNotAllowed returns an error which wraps err and satisfies +// Is(err, MethodNotAllowed) and the Locationer interface. +func NewMethodNotAllowed(err error, msg string) error { + return &errWithType{ + error: newLocationError(wrapErrorWithMsg(err, msg), 1), + errType: MethodNotAllowed, + } +} + +// Deprecated: IsMethodNotAllowed reports whether err is a MethodNotAllowed +// error. 
+// Use Is(err, MethodNotAllowed).
+func IsMethodNotAllowed(err error) bool {
+	return Is(err, MethodNotAllowed)
+}
+
+// Forbiddenf returns an error which satisfies Is(err, Forbidden) and the
+// Locationer interface.
+func Forbiddenf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(Hide(Forbidden), format, args...),
+		1,
+	)
+}
+
+// NewForbidden returns an error which wraps err and satisfies
+// Is(err, Forbidden) and the Locationer interface.
+func NewForbidden(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: Forbidden,
+	}
+}
+
+// Deprecated: IsForbidden reports whether err is a Forbidden error. Use
+// Is(err, Forbidden).
+func IsForbidden(err error) bool {
+	return Is(err, Forbidden)
+}
+
+// QuotaLimitExceededf returns an error which satisfies
+// Is(err, QuotaLimitExceeded) and the Locationer interface.
+func QuotaLimitExceededf(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(Hide(QuotaLimitExceeded), format, args...),
+		1,
+	)
+}
+
+// NewQuotaLimitExceeded returns an error which wraps err and satisfies
+// Is(err, QuotaLimitExceeded) and the Locationer interface.
+func NewQuotaLimitExceeded(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: QuotaLimitExceeded,
+	}
+}
+
+// Deprecated: IsQuotaLimitExceeded reports whether err is a QuotaLimitExceeded
+// error. Use Is(err, QuotaLimitExceeded).
+func IsQuotaLimitExceeded(err error) bool {
+	return Is(err, QuotaLimitExceeded)
+}
+
+// NotYetAvailablef returns an error which satisfies Is(err, NotYetAvailable)
+// and the Locationer interface.
+func NotYetAvailablef(format string, args ...interface{}) error {
+	return newLocationError(
+		makeWrappedConstError(Hide(NotYetAvailable), format, args...),
+		1,
+	)
+}
+
+// NewNotYetAvailable returns an error which wraps err and satisfies
+// Is(err, NotYetAvailable) and the Locationer interface.
+func NewNotYetAvailable(err error, msg string) error {
+	return &errWithType{
+		error:   newLocationError(wrapErrorWithMsg(err, msg), 1),
+		errType: NotYetAvailable,
+	}
+}
+
+// Deprecated: IsNotYetAvailable reports whether err is a NotYetAvailable
+// error. Use Is(err, NotYetAvailable).
+func IsNotYetAvailable(err error) bool {
+	return Is(err, NotYetAvailable)
+}
diff --git a/vendor/github.com/juju/errors/functions.go b/vendor/github.com/juju/errors/functions.go
new file mode 100644
index 00000000..952a6739
--- /dev/null
+++ b/vendor/github.com/juju/errors/functions.go
@@ -0,0 +1,454 @@
+// Copyright 2014 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package errors
+
+import (
+	stderrors "errors"
+	"fmt"
+	"runtime"
+	"strings"
+)
+
+// New is a drop-in replacement for the standard library errors module that
+// records the location where the error is created.
+//
+// For example:
+//	return errors.New("validation failed")
+//
+func New(message string) error {
+	err := &Err{message: message}
+	err.SetLocation(1)
+	return err
+}
+
+// Errorf creates a new annotated error and records the location where the
+// error is created. This should be a drop-in replacement for fmt.Errorf.
+// +// For example: +// return errors.Errorf("validation failed: %s", message) +// +func Errorf(format string, args ...interface{}) error { + err := &Err{message: fmt.Sprintf(format, args...)} + err.SetLocation(1) + return err +} + +// getLocation records the package path-qualified function name of the error at +// callDepth stack frames above the call. +func getLocation(callDepth int) (string, int) { + rpc := make([]uintptr, 1) + n := runtime.Callers(callDepth+2, rpc[:]) + if n < 1 { + return "", 0 + } + frame, _ := runtime.CallersFrames(rpc).Next() + return frame.Function, frame.Line +} + +// Trace adds the location of the Trace call to the stack. The Cause of the +// resulting error is the same as the error parameter. If the other error is +// nil, the result will be nil. +// +// For example: +// if err := SomeFunc(); err != nil { +// return errors.Trace(err) +// } +// +func Trace(other error) error { + //return SetLocation(other, 2) + if other == nil { + return nil + } + err := &Err{previous: other, cause: Cause(other)} + err.SetLocation(1) + return err +} + +// Annotate is used to add extra context to an existing error. The location of +// the Annotate call is recorded with the annotations. The file, line and +// function are also recorded. +// +// For example: +// if err := SomeFunc(); err != nil { +// return errors.Annotate(err, "failed to frombulate") +// } +// +func Annotate(other error, message string) error { + if other == nil { + return nil + } + err := &Err{ + previous: other, + cause: Cause(other), + message: message, + } + err.SetLocation(1) + return err +} + +// Annotatef is used to add extra context to an existing error. The location of +// the Annotate call is recorded with the annotations. The file, line and +// function are also recorded. +// +// For example: +// if err := SomeFunc(); err != nil { +// return errors.Annotatef(err, "failed to frombulate the %s", arg) +// } +// +func Annotatef(other error, format string, args ...interface{}) error { + if other == nil { + return nil + } + err := &Err{ + previous: other, + cause: Cause(other), + message: fmt.Sprintf(format, args...), + } + err.SetLocation(1) + return err +} + +// DeferredAnnotatef annotates the given error (when it is not nil) with the given +// format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef +// does nothing. This method is used in a defer statement in order to annotate any +// resulting error with the same message. +// +// For example: +// +// defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg) +// +func DeferredAnnotatef(err *error, format string, args ...interface{}) { + if *err == nil { + return + } + newErr := &Err{ + message: fmt.Sprintf(format, args...), + cause: Cause(*err), + previous: *err, + } + newErr.SetLocation(1) + *err = newErr +} + +// Wrap changes the Cause of the error. The location of the Wrap call is also +// stored in the error stack. +// +// For example: +// if err := SomeFunc(); err != nil { +// newErr := &packageError{"more context", private_value} +// return errors.Wrap(err, newErr) +// } +// +func Wrap(other, newDescriptive error) error { + err := &Err{ + previous: other, + cause: newDescriptive, + } + err.SetLocation(1) + return err +} + +// Wrapf changes the Cause of the error, and adds an annotation. The location +// of the Wrap call is also stored in the error stack. 
+// +// For example: +// if err := SomeFunc(); err != nil { +// return errors.Wrapf(err, simpleErrorType, "invalid value %q", value) +// } +// +func Wrapf(other, newDescriptive error, format string, args ...interface{}) error { + err := &Err{ + message: fmt.Sprintf(format, args...), + previous: other, + cause: newDescriptive, + } + err.SetLocation(1) + return err +} + +// Maskf masks the given error with the given format string and arguments (like +// fmt.Sprintf), returning a new error that maintains the error stack, but +// hides the underlying error type. The error string still contains the full +// annotations. If you want to hide the annotations, call Wrap. +func Maskf(other error, format string, args ...interface{}) error { + if other == nil { + return nil + } + err := &Err{ + message: fmt.Sprintf(format, args...), + previous: other, + } + err.SetLocation(1) + return err +} + +// Mask hides the underlying error type, and records the location of the masking. +func Mask(other error) error { + if other == nil { + return nil + } + err := &Err{ + previous: other, + } + err.SetLocation(1) + return err +} + +// Cause returns the cause of the given error. This will be either the +// original error, or the result of a Wrap or Mask call. +// +// Cause is the usual way to diagnose errors that may have been wrapped by +// the other errors functions. +func Cause(err error) error { + var diag error + if err, ok := err.(causer); ok { + diag = err.Cause() + } + if diag != nil { + return diag + } + return err +} + +type causer interface { + Cause() error +} + +type wrapper interface { + // Message returns the top level error message, + // not including the message from the Previous + // error. + Message() string + + // Underlying returns the Previous error, or nil + // if there is none. + Underlying() error +} + +var ( + _ wrapper = (*Err)(nil) + _ Locationer = (*Err)(nil) + _ causer = (*Err)(nil) +) + +// Details returns information about the stack of errors wrapped by err, in +// the format: +// +// [{filename:99: error one} {otherfile:55: cause of error one}] +// +// This is a terse alternative to ErrorStack as it returns a single line. +func Details(err error) string { + if err == nil { + return "[]" + } + var s []byte + s = append(s, '[') + for { + s = append(s, '{') + if err, ok := err.(Locationer); ok { + file, line := err.Location() + if file != "" { + s = append(s, fmt.Sprintf("%s:%d", file, line)...) + s = append(s, ": "...) + } + } + if cerr, ok := err.(wrapper); ok { + s = append(s, cerr.Message()...) + err = cerr.Underlying() + } else { + s = append(s, err.Error()...) + err = nil + } + s = append(s, '}') + if err == nil { + break + } + s = append(s, ' ') + } + s = append(s, ']') + return string(s) +} + +// ErrorStack returns a string representation of the annotated error. If the +// error passed as the parameter is not an annotated error, the result is +// simply the result of the Error() method on that error. +// +// If the error is an annotated error, a multi-line string is returned where +// each line represents one entry in the annotation stack. The full filename +// from the call stack is used in the output. 
+//
+// first error
+// github.com/juju/errors/annotation_test.go:193:
+// github.com/juju/errors/annotation_test.go:194: annotation
+// github.com/juju/errors/annotation_test.go:195:
+// github.com/juju/errors/annotation_test.go:196: more context
+// github.com/juju/errors/annotation_test.go:197:
+func ErrorStack(err error) string {
+	return strings.Join(errorStack(err), "\n")
+}
+
+func errorStack(err error) []string {
+	if err == nil {
+		return nil
+	}
+
+	// We want the first error first
+	var lines []string
+	for {
+		var buff []byte
+		if err, ok := err.(Locationer); ok {
+			file, line := err.Location()
+			// Include the location, if one was recorded.
+			if file != "" {
+				buff = append(buff, fmt.Sprintf("%s:%d", file, line)...)
+				buff = append(buff, ": "...)
+			}
+		}
+		if cerr, ok := err.(wrapper); ok {
+			message := cerr.Message()
+			buff = append(buff, message...)
+			// If there is a cause for this error, and it is different to the cause
+			// of the underlying error, then output the error string in the stack trace.
+			var cause error
+			if err1, ok := err.(causer); ok {
+				cause = err1.Cause()
+			}
+			err = cerr.Underlying()
+			if cause != nil && !sameError(Cause(err), cause) {
+				if message != "" {
+					buff = append(buff, ": "...)
+				}
+				buff = append(buff, cause.Error()...)
+			}
+		} else {
+			buff = append(buff, err.Error()...)
+			err = nil
+		}
+		lines = append(lines, string(buff))
+		if err == nil {
+			break
+		}
+	}
+	// reverse the lines to get the original error, which was at the end of
+	// the list, back to the start.
+	var result []string
+	for i := len(lines); i > 0; i-- {
+		result = append(result, lines[i-1])
+	}
+	return result
+}
+
+// Unwrap is a proxy for the Unwrap function in Go's standard `errors` library
+// (pkg.go.dev/errors).
+func Unwrap(err error) error {
+	return stderrors.Unwrap(err)
+}
+
+// Is is a proxy for the Is function in Go's standard `errors` library
+// (pkg.go.dev/errors).
+func Is(err, target error) bool {
+	return stderrors.Is(err, target)
+}
+
+// HasType is a function wrapper around AsType that drops the value return
+// from AsType(), making a function that can be used like this:
+//
+//	return HasType[*MyError](err)
+//
+// Or
+//
+//	if HasType[*MyError](err) {}
+func HasType[T error](err error) bool {
+	_, rval := AsType[T](err)
+	return rval
+}
+
+// As is a proxy for the As function in Go's standard `errors` library
+// (pkg.go.dev/errors).
+func As(err error, target interface{}) bool {
+	return stderrors.As(err, target)
+}
+
+// AsType finds the first error in err's chain that is assignable to type T,
+// and if a match is found, returns that error value and true. Otherwise, it
+// returns T's zero value and false. If an error in the chain implements
+// As(any) bool, its As method will be called when its type is not T.
+//
+// AsType is equivalent to errors.As, but uses a type parameter and returns
+// the target, to avoid having to define a variable before the call. For
+// example, callers can replace this:
+//
+//	var pathError *fs.PathError
+//	if errors.As(err, &pathError) {
+//		fmt.Println("Failed at path:", pathError.Path)
+//	}
+//
+// With:
+//
+//	if pathError, ok := errors.AsType[*fs.PathError](err); ok {
+//		fmt.Println("Failed at path:", pathError.Path)
+//	}
+func AsType[T error](err error) (T, bool) {
+	for err != nil {
+		if e, is := err.(T); is {
+			return e, true
+		}
+		var res T
+		if x, ok := err.(interface{ As(any) bool }); ok && x.As(&res) {
+			return res, true
+		}
+		err = stderrors.Unwrap(err)
+	}
+	var zero T
+	return zero, false
+}
+
+// SetLocation takes a given error and records where in the stack SetLocation
+// was called from and returns the wrapped error with the location information
+// set. The returned error implements the Locationer interface. If err is nil
+// then a nil error is returned.
+func SetLocation(err error, callDepth int) error {
+	if err == nil {
+		return nil
+	}
+
+	return newLocationError(err, callDepth)
+}
+
+// fmtNoop provides an internal type for wrapping errors so they won't be
+// printed by fmt-style commands. As this type is used by the Hide function,
+// it is expected that the wrapped error not be nil.
+type fmtNoop struct {
+	error
+}
+
+// Format implements the fmt.Formatter interface so that the error wrapped by
+// fmtNoop will not be printed.
+func (*fmtNoop) Format(_ fmt.State, r rune) {}
+
+// Is implements errors.Is. It is useful for us to be able to check if an error
+// chain has fmtNoop for formatting purposes.
+func (f *fmtNoop) Is(err error) bool {
+	_, is := err.(*fmtNoop)
+	return is
+}
+
+// Unwrap implements the errors.Unwrap method returning the error wrapped by
+// fmtNoop.
+func (f *fmtNoop) Unwrap() error {
+	return f.error
+}
+
+// Hide takes an error and silences its error string from appearing in fmt-like
+// formatting, while keeping it available to Is, As and Unwrap.
+func Hide(err error) error {
+	if err == nil {
+		return nil
+	}
+	return &fmtNoop{err}
+}
diff --git a/vendor/github.com/juju/retry/.gitignore b/vendor/github.com/juju/retry/.gitignore
new file mode 100644
index 00000000..9ed3b07c
--- /dev/null
+++ b/vendor/github.com/juju/retry/.gitignore
@@ -0,0 +1 @@
+*.test
diff --git a/vendor/github.com/juju/retry/LICENSE b/vendor/github.com/juju/retry/LICENSE
new file mode 100644
index 00000000..ade9307b
--- /dev/null
+++ b/vendor/github.com/juju/retry/LICENSE
@@ -0,0 +1,191 @@
+All files in this repository are licensed as follows. If you contribute
+to this repository, it is assumed that you license your contribution
+under the same license unless you state otherwise.
+
+All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file.
+
+This software is licensed under the LGPLv3, included below.
+
+As a special exception to the GNU Lesser General Public License version 3
+("LGPL3"), the copyright holders of this Library give you permission to
+convey to a third party a Combined Work that links statically or dynamically
+to this Library without providing any Minimal Corresponding Source or
+Minimal Application Code as set out in 4d or providing the installation
+information set out in section 4e, provided that you comply with the other
+provisions of LGPL3 and provided that you meet, for the Application the
+terms and conditions of the license(s) which apply to the Application.
+
+Except as stated in this special exception, the provisions of LGPL3 will
+continue to comply in full to this Library. If you modify this Library, you
+may apply this exception to your version of this Library, but you are not
+obliged to do so.
If you do not wish to do so, delete this exception +statement from your version. This exception does not (and cannot) modify any +license terms which apply to the Application, with which you must still +comply. + + + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. 
+ + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. 
If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/vendor/github.com/juju/retry/Makefile b/vendor/github.com/juju/retry/Makefile new file mode 100644 index 00000000..6d36bad0 --- /dev/null +++ b/vendor/github.com/juju/retry/Makefile @@ -0,0 +1,15 @@ +PROJECT := github.com/juju/retry + +default: check + +check-licence: + @(fgrep -rl "Licensed under the LGPLv3" .;\ + fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" .;\ + find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ + xargs -I {} echo FAIL: licence missed: {} + +check: check-licence + go test $(PROJECT)/... + +docs: + godoc2md $(PROJECT) > README.md diff --git a/vendor/github.com/juju/retry/README.md b/vendor/github.com/juju/retry/README.md new file mode 100644 index 00000000..1fbe9a47 --- /dev/null +++ b/vendor/github.com/juju/retry/README.md @@ -0,0 +1,277 @@ + +# retry + import "github.com/juju/retry" + +The retry package encapsulates the mechanism around retrying commands. + +The simple use is to call retry.Call with a function closure. + +```go + + + err := retry.Call(retry.CallArgs{ + Func: func() error { ... }, + Attempts: 5, + Delay: time.Minute, + Clock: clock.WallClock, + }) + +``` + +The bare minimum arguments that need to be specified are: +* Func - the function to call +* Attempts - the number of times to try Func before giving up, or a negative number for unlimited attempts (`retry.UnlimitedAttempts`) +* Delay - how long to wait between each try that returns an error +* Clock - either the wall clock, or some testing clock + +Any error that is returned from the `Func` is considered transient. +In order to identify some errors as fatal, pass in a function for the +`IsFatalError` CallArgs value. + +In order to have the `Delay` change for each iteration, a `BackoffFunc` +needs to be set on the CallArgs. A simple doubling delay function is +provided by `DoubleDelay`. 
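+
+For instance, here is a minimal sketch of a call that doubles the delay after
+each failed attempt (`fetchItem` is a hypothetical `func() error` standing in
+for whatever operation is being retried):
+
+```go
+
+
+	err := retry.Call(retry.CallArgs{
+		Func:        fetchItem,               // hypothetical operation to retry
+		Attempts:    5,
+		Delay:       100 * time.Millisecond,  // initial delay
+		BackoffFunc: retry.DoubleDelay,       // 100ms, 200ms, 400ms, ...
+		MaxDelay:    2 * time.Second,         // cap for the doubling
+		Clock:       clock.WallClock,
+	})
+
+```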
+
+An example of a more complex `BackoffFunc` could be a stepped function such
+as:
+
+```go
+
+
+	func StepDelay(last time.Duration, attempt int) time.Duration {
+		switch attempt{
+		case 1:
+			return time.Second
+		case 2:
+			return 5 * time.Second
+		case 3:
+			return 20 * time.Second
+		case 4:
+			return time.Minute
+		case 5:
+			return 5 * time.Minute
+		default:
+			return 2 * last
+		}
+	}
+
+```
+
+Consider some package `foo` that has a `TryAgainError`, which looks something
+like this:
+```go
+
+
+	type TryAgainError struct {
+		After time.Duration
+	}
+
+```
+and we create something that looks like this:
+
+```go
+
+
+	type TryAgainHelper struct {
+		nextDelay time.Duration
+	}
+
+	func (h *TryAgainHelper) notify(lastError error, attempt int) {
+		if tryAgain, ok := lastError.(*foo.TryAgainError); ok {
+			h.nextDelay = tryAgain.After
+		} else {
+			h.nextDelay = 0
+		}
+	}
+
+	func (h *TryAgainHelper) next(last time.Duration) time.Duration {
+		if h.nextDelay != 0 {
+			return h.nextDelay
+		}
+		return last
+	}
+
+```
+
+Then we could do this:
+```go
+
+
+	helper := TryAgainHelper{}
+	retry.Call(retry.CallArgs{
+		Func: func() error {
+			return foo.SomeFunc()
+		},
+		NotifyFunc:  helper.notify,
+		BackoffFunc: helper.next,
+		Attempts:    20,
+		Delay:       100 * time.Millisecond,
+		Clock:       clock.WallClock,
+	})
+
+```
+
+
+
+
+## Constants
+``` go
+const (
+	// UnlimitedAttempts can be used as a value for `Attempts` to clearly
+	// show to the reader that there is no limit to the number of attempts.
+	UnlimitedAttempts = -1
+)
+```
+
+
+## func Call
+``` go
+func Call(args CallArgs) error
+```
+Call will repeatedly execute the Func until either the function returns no
+error, the retry count is exceeded or the stop channel is closed.
+
+
+## func DoubleDelay
+``` go
+func DoubleDelay(delay time.Duration, attempt int) time.Duration
+```
+DoubleDelay provides a simple function that doubles the duration passed in.
+This can then be easily used as the `BackoffFunc` in the `CallArgs`
+structure.
+
+## func ExpBackoff
+``` go
+func ExpBackoff(minDelay, maxDelay time.Duration, exp float64, applyJitter bool) func(time.Duration, int) time.Duration
+```
+ExpBackoff returns a function which generates time.Duration values using an
+exponential back-off algorithm with the specified parameters. The returned value
+can then be easily used as the `BackoffFunc` in the `CallArgs` structure.
+
+The next delay value is calculated using the following formula:
+	`newDelay = min(minDelay * exp^attempt, maxDelay)`
+
+If `applyJitter` is set to `true`, the function will apply a random +/-20%
+jitter to `newDelay` before clamping it to the `[minDelay, maxDelay]` range.
+
+## func IsAttemptsExceeded
+``` go
+func IsAttemptsExceeded(err error) bool
+```
+IsAttemptsExceeded returns true if the error is the result of the `Call`
+function finishing due to hitting the requested number of `Attempts`.
+
+
+## func IsDurationExceeded
+``` go
+func IsDurationExceeded(err error) bool
+```
+IsDurationExceeded returns true if the error is the result of the `Call`
+function finishing due to the total duration exceeding the specified
+`MaxDuration` value.
+
+
+## func IsRetryStopped
+``` go
+func IsRetryStopped(err error) bool
+```
+IsRetryStopped returns true if the error is the result of the `Call`
+function finishing due to the stop channel being closed.
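+
+As a sketch of how these predicates combine with `LastError` below, here is a
+hypothetical helper (`args` stands in for any valid `CallArgs`):
+
+```go
+
+
+	func callWithDiagnostics(args retry.CallArgs) error {
+		err := retry.Call(args)
+		switch {
+		case err == nil:
+			return nil
+		case retry.IsAttemptsExceeded(err) || retry.IsDurationExceeded(err):
+			// Give up, but surface the last error returned by Func.
+			return retry.LastError(err)
+		case retry.IsRetryStopped(err):
+			// Interrupted via the Stop channel.
+			return retry.LastError(err)
+		}
+		// Anything else, such as a fatal error, is passed through.
+		return err
+	}
+
+```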
+
+
+## func LastError
+``` go
+func LastError(err error) error
+```
+LastError retrieves the last error returned from `Func` before iteration
+was terminated due to the attempt count being exceeded, the maximum
+duration being exceeded, or the stop channel being closed.
+
+
+
+## type CallArgs
+``` go
+type CallArgs struct {
+	// Func is the function that will be retried if it returns an error result.
+	Func func() error
+
+	// IsFatalError is a function that, if set, will be called for every non-
+	// nil error result from `Func`. If `IsFatalError` returns true, the error
+	// is immediately returned breaking out from any further retries.
+	IsFatalError func(error) bool
+
+	// NotifyFunc is a function that is called if Func fails, and the attempt
+	// number. The first time this function is called attempt is 1, the second
+	// time, attempt is 2 and so on.
+	NotifyFunc func(lastError error, attempt int)
+
+	// Attempts specifies the number of times Func should be retried before
+	// giving up and returning the `AttemptsExceeded` error. If a negative
+	// value is specified, the `Call` will retry forever.
+	Attempts int
+
+	// Delay specifies how long to wait between retries.
+	Delay time.Duration
+
+	// MaxDelay specifies the longest time to wait between retries. If no
+	// value is specified there is no maximum delay.
+	MaxDelay time.Duration
+
+	// MaxDuration specifies the maximum time the `Call` function should spend
+	// iterating over `Func`. The duration is calculated from the start of the
+	// `Call` function. If the next delay time would take the total duration
+	// of the call over MaxDuration, then a DurationExceeded error is
+	// returned. If no value is specified, Call will continue until the number
+	// of attempts is complete.
+	MaxDuration time.Duration
+
+	// BackoffFunc allows the caller to provide a function that alters the
+	// delay each time through the loop. If this function is not provided the
+	// delay is the same each iteration. Alternatively a function such as
+	// `retry.DoubleDelay` can be used that will provide an exponential
+	// backoff. The first time this function is called attempt is 1, the
+	// second time, attempt is 2 and so on.
+	BackoffFunc func(delay time.Duration, attempt int) time.Duration
+
+	// Clock provides the mechanism for waiting. Normal program execution is
+	// expected to use something like clock.WallClock, and tests can override
+	// this to not actually sleep in tests.
+	Clock clock.Clock
+
+	// Stop is a channel that can be used to indicate that the waiting should
+	// be interrupted. If Stop is nil, then the Call function cannot be interrupted.
+	// If the channel is closed prior to the Call function being executed, the
+	// Func is still attempted once.
+	Stop <-chan struct{}
+}
+```
+CallArgs is a simple structure used to define the behaviour of the Call
+function.
+
+
+
+
+
+
+### func (\*CallArgs) Validate
+``` go
+func (args *CallArgs) Validate() error
+```
+Validate checks that the values are valid. This ensures that the Func, Delay,
+Attempts and Clock have been specified.
+
+
+
+
+
+- - -
+Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md)
diff --git a/vendor/github.com/juju/retry/clock.go b/vendor/github.com/juju/retry/clock.go
new file mode 100644
index 00000000..3451fbf3
--- /dev/null
+++ b/vendor/github.com/juju/retry/clock.go
@@ -0,0 +1,16 @@
+// Copyright 2015 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+package retry
+
+import "time"
+
+// Clock provides an interface for dealing with clocks.
+type Clock interface {
+	// Now returns the current clock time.
+	Now() time.Time
+
+	// After waits for the duration to elapse and then sends the
+	// current time on the returned channel.
+	After(time.Duration) <-chan time.Time
+}
diff --git a/vendor/github.com/juju/retry/doc.go b/vendor/github.com/juju/retry/doc.go
new file mode 100644
index 00000000..8a7393e0
--- /dev/null
+++ b/vendor/github.com/juju/retry/doc.go
@@ -0,0 +1,90 @@
+// Copyright 2015 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+
+// The retry package encapsulates the mechanism around retrying commands.
+//
+// The simple use is to call retry.Call with a function closure.
+//
+//	err := retry.Call(retry.CallArgs{
+//		Func:     func() error { ... },
+//		Attempts: 5,
+//		Delay:    time.Minute,
+//		Clock:    clock.WallClock,
+//	})
+//
+// The bare minimum arguments that need to be specified are:
+//   - Func - the function to call
+//   - Attempts - the number of times to try Func before giving up, or a negative number for unlimited attempts (`retry.UnlimitedAttempts`)
+//   - Delay - how long to wait between each try that returns an error
+//   - Clock - either the wall clock, or some testing clock
+//
+// Any error that is returned from the Func is considered transient.
+// In order to identify some errors as fatal, pass in a function for the
+// IsFatalError CallArgs value.
+//
+// In order to have the Delay change for each iteration, a BackoffFunc
+// needs to be set on the CallArgs. A simple doubling delay function is
+// provided by DoubleDelay.
+//
+// An example of a more complex BackoffFunc could be a stepped function such
+// as:
+//
+//	func StepDelay(last time.Duration, attempt int) time.Duration {
+//		switch attempt{
+//		case 1:
+//			return time.Second
+//		case 2:
+//			return 5 * time.Second
+//		case 3:
+//			return 20 * time.Second
+//		case 4:
+//			return time.Minute
+//		case 5:
+//			return 5 * time.Minute
+//		default:
+//			return 2 * last
+//		}
+//	}
+//
+// Consider some package foo that has a TryAgainError, which looks something
+// like this:
+//
+//	type TryAgainError struct {
+//		After time.Duration
+//	}
+//
+// and we create something that looks like this:
+//
+//	type TryAgainHelper struct {
+//		nextDelay time.Duration
+//	}
+//
+//	func (h *TryAgainHelper) notify(lastError error, attempt int) {
+//		if tryAgain, ok := lastError.(*foo.TryAgainError); ok {
+//			h.nextDelay = tryAgain.After
+//		} else {
+//			h.nextDelay = 0
+//		}
+//	}
+//
+//	func (h *TryAgainHelper) next(last time.Duration) time.Duration {
+//		if h.nextDelay != 0 {
+//			return h.nextDelay
+//		}
+//		return last
+//	}
+//
+// Then we could do this:
+//
+//	helper := TryAgainHelper{}
+//	retry.Call(retry.CallArgs{
+//		Func: func() error {
+//			return foo.SomeFunc()
+//		},
+//		NotifyFunc:  helper.notify,
+//		BackoffFunc: helper.next,
+//		Attempts:    20,
+//		Delay:       100 * time.Millisecond,
+//		Clock:       clock.WallClock,
+//	})
+package retry
diff --git a/vendor/github.com/juju/retry/retry.go b/vendor/github.com/juju/retry/retry.go
new file mode 100644
index 00000000..d9964d6d
--- /dev/null
+++ b/vendor/github.com/juju/retry/retry.go
@@ -0,0 +1,260 @@
+// Copyright 2015 Canonical Ltd.
+// Licensed under the LGPLv3, see LICENCE file for details.
+ +package retry + +import ( + "fmt" + "math" + "math/rand" + "time" + + "github.com/juju/errors" +) + +const ( + // UnlimitedAttempts can be used as a value for Attempts to clearly + // show to the reader that there is no limit to the number of attempts. + UnlimitedAttempts = -1 +) + +// retryStopped is the error that is returned from the Call function +// when the stop channel has been closed. +type retryStopped struct { + lastError error +} + +// Error provides the implementation for the error interface method. +func (e *retryStopped) Error() string { + return fmt.Sprintf("retry stopped") +} + +// attemptsExceeded is the error that is returned when the retry count has +// been hit without the function returning a nil error result. The last error +// returned from the function being retried is available as the LastError +// attribute. +type attemptsExceeded struct { + lastError error +} + +// Error provides the implementation for the error interface method. +func (e *attemptsExceeded) Error() string { + return fmt.Sprintf("attempt count exceeded: %s", e.lastError) +} + +// durationExceeded is the error that is returned when the total time that the +// Call function would have executed exceeds the MaxDuration specified. +// The last error returned from the function being retried is available as the +// LastError attribute. +type durationExceeded struct { + lastError error +} + +// Error provides the implementation for the error interface method. +func (e *durationExceeded) Error() string { + return fmt.Sprintf("max duration exceeded: %s", e.lastError) +} + +// LastError retrieves the last error returned from Func before iteration +// was terminated due to the attempt count being exceeded, the maximum +// duration being exceeded, or the stop channel being closed. +func LastError(err error) error { + cause := errors.Cause(err) + switch err := cause.(type) { + case *attemptsExceeded: + return err.lastError + case *retryStopped: + return err.lastError + case *durationExceeded: + return err.lastError + } + return errors.Errorf("unexpected error type: %T, %s", cause, cause) +} + +// IsAttemptsExceeded returns true if the error is the result of the Call +// function finishing due to hitting the requested number of Attempts. +func IsAttemptsExceeded(err error) bool { + cause := errors.Cause(err) + _, ok := cause.(*attemptsExceeded) + return ok +} + +// IsDurationExceeded returns true if the error is the result of the Call +// function finishing due to the total duration exceeding the specified +// MaxDuration value. +func IsDurationExceeded(err error) bool { + cause := errors.Cause(err) + _, ok := cause.(*durationExceeded) + return ok +} + +// IsRetryStopped returns true if the error is the result of the Call +// function finishing due to the stop channel being closed. +func IsRetryStopped(err error) bool { + cause := errors.Cause(err) + _, ok := cause.(*retryStopped) + return ok +} + +// CallArgs is a simple structure used to define the behaviour of the Call +// function. +type CallArgs struct { + // Func is the function that will be retried if it returns an error result. + Func func() error + + // IsFatalError is a function that, if set, will be called for every non- + // nil error result from Func. If IsFatalError returns true, the error + // is immediately returned breaking out from any further retries. + IsFatalError func(error) bool + + // NotifyFunc is a function that is called if Func fails, and the attempt + // number. 
+	// The first time this function is called attempt is 1, the second
+	// time, attempt is 2 and so on.
+	NotifyFunc func(lastError error, attempt int)
+
+	// Attempts specifies the number of times Func should be retried before
+	// giving up and returning the AttemptsExceeded error. If a negative
+	// value is specified, the Call will retry forever.
+	Attempts int
+
+	// Delay specifies how long to wait between retries.
+	Delay time.Duration
+
+	// MaxDelay specifies the longest time to wait between retries. If no
+	// value is specified there is no maximum delay.
+	MaxDelay time.Duration
+
+	// MaxDuration specifies the maximum time the Call function should spend
+	// iterating over Func. The duration is calculated from the start of the
+	// Call function. If the next delay time would take the total duration
+	// of the call over MaxDuration, then a DurationExceeded error is
+	// returned. If no value is specified, Call will continue until the number
+	// of attempts is complete.
+	MaxDuration time.Duration
+
+	// BackoffFunc allows the caller to provide a function that alters the
+	// delay each time through the loop. If this function is not provided the
+	// delay is the same each iteration. Alternatively a function such as
+	// retry.DoubleDelay can be used that will provide an exponential
+	// backoff. The first time this function is called attempt is 1, the
+	// second time, attempt is 2 and so on.
+	BackoffFunc func(delay time.Duration, attempt int) time.Duration
+
+	// Clock provides the mechanism for waiting. Normal program execution is
+	// expected to use something like clock.WallClock, and tests can override
+	// this to not actually sleep in tests.
+	Clock Clock
+
+	// Stop is a channel that can be used to indicate that the waiting should
+	// be interrupted. If Stop is nil, then the Call function cannot be interrupted.
+	// If the channel is closed prior to the Call function being executed, the
+	// Func is still attempted once.
+	Stop <-chan struct{}
+}
+
+// Validate checks that the values are valid. This ensures that the Func,
+// Delay, Attempts and Clock have been specified.
+func (args *CallArgs) Validate() error {
+	if args.Func == nil {
+		return errors.NotValidf("missing Func")
+	}
+	if args.Delay == 0 {
+		return errors.NotValidf("missing Delay")
+	}
+	if args.Clock == nil {
+		return errors.NotValidf("missing Clock")
+	}
+	// One of Attempts or MaxDuration needs to be specified.
+	if args.Attempts == 0 && args.MaxDuration == 0 {
+		return errors.NotValidf("missing Attempts or MaxDuration")
+	}
+	return nil
+}
+
+// Call will repeatedly execute the Func until either the function returns no
+// error, the retry count is exceeded or the stop channel is closed.
+func Call(args CallArgs) error {
+	err := args.Validate()
+	if err != nil {
+		return errors.Trace(err)
+	}
+	start := args.Clock.Now()
+	for i := 1; args.Attempts <= 0 || i <= args.Attempts; i++ {
+		err = args.Func()
+		if err == nil {
+			return nil
+		}
+		if args.IsFatalError != nil && args.IsFatalError(err) {
+			return errors.Trace(err)
+		}
+		if args.NotifyFunc != nil {
+			args.NotifyFunc(err, i)
+		}
+		if i == args.Attempts && args.Attempts > 0 {
+			break // don't wait before returning the error
+		}
+
+		if args.BackoffFunc != nil {
+			delay := args.BackoffFunc(args.Delay, i)
+			if delay > args.MaxDelay && args.MaxDelay > 0 {
+				delay = args.MaxDelay
+			}
+			args.Delay = delay
+		}
+		elapsedTime := args.Clock.Now().Sub(start)
+		if args.MaxDuration > 0 && (elapsedTime+args.Delay) > args.MaxDuration {
+			return errors.Wrap(err, &durationExceeded{err})
+		}
+
+		// Wait for the delay, and retry
+		select {
+		case <-args.Clock.After(args.Delay):
+		case <-args.Stop:
+			return errors.Wrap(err, &retryStopped{err})
+		}
+	}
+	return errors.Wrap(err, &attemptsExceeded{err})
+}
+
+// DoubleDelay provides a simple function that doubles the duration passed in.
+// This can then be easily used as the BackoffFunc in the CallArgs
+// structure.
+func DoubleDelay(delay time.Duration, attempt int) time.Duration {
+	if attempt == 1 {
+		return delay
+	}
+	return delay * 2
+}
+
+// ExpBackoff returns a function which generates time.Duration values using
+// an exponential back-off algorithm with the specified parameters. The
+// returned value can then be easily used as the BackoffFunc in the CallArgs
+// structure.
+//
+// The next delay value is calculated using the following formula:
+//
+//	newDelay = min(minDelay * exp^attempt, maxDelay)
+//
+// If applyJitter is set to true, the function will apply a random +/-20%
+// jitter to newDelay before clamping it to the [minDelay, maxDelay] range.
+func ExpBackoff(minDelay, maxDelay time.Duration, exp float64, applyJitter bool) func(time.Duration, int) time.Duration {
+	minDelayF := float64(minDelay)
+	maxDelayF := float64(maxDelay)
+	return func(_ time.Duration, attempt int) time.Duration {
+		newDelay := minDelayF * math.Pow(exp, float64(attempt))
+
+		// Optionally apply jitter to the computed delay.
+		if applyJitter {
+			// We want to go +/- 20%, which is a 40% swing, and
+			// Float64 returns in the range 0-1.
+			newDelay = (1 + rand.Float64()*0.4 - 0.2) * newDelay
+		}
+		if newDelay < minDelayF {
+			newDelay = minDelayF
+		}
+		if newDelay > maxDelayF {
+			newDelay = maxDelayF
+		}
+		return time.Duration(newDelay).Round(time.Millisecond)
+	}
+}
diff --git a/vendor/github.com/mattn/go-sqlite3/README.md b/vendor/github.com/mattn/go-sqlite3/README.md
index 76f49bac..3b43b033 100644
--- a/vendor/github.com/mattn/go-sqlite3/README.md
+++ b/vendor/github.com/mattn/go-sqlite3/README.md
@@ -351,8 +351,6 @@ For example the TDM-GCC Toolchain can be found [here](https://jmeubank.github.io
 
 # User Authentication
 
-***This is deprecated***
-
 This package supports the SQLite User Authentication module.
 
 ## Compile
diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c
index 44d91d9d..e9cca66c 100644
--- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c
+++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c
@@ -1,7 +1,7 @@
 #ifndef USE_LIBSQLITE3
 /******************************************************************************
 ** This file is an amalgamation of many separate C source files from SQLite
-** version 3.50.3.  By combining all the individual C code files into this
By combining all the individual C code files into this +** version 3.49.1. By combining all the individual C code files into this ** single large file, the entire code can be compiled as a single translation ** unit. This allows many compilers to do optimizations that would not be ** possible if the files were compiled separately. Performance improvements @@ -19,7 +19,7 @@ ** separate file. This file contains only code for the core SQLite library. ** ** The content in this amalgamation comes from Fossil check-in -** 3ce993b8657d6d9deda380a93cdd6404a8c8 with changes in files: +** 873d4e274b4988d260ba8354a9718324a1c2 with changes in files: ** ** */ @@ -453,7 +453,7 @@ extern "C" { ** ** Since [version 3.6.18] ([dateof:3.6.18]), ** SQLite source code has been stored in the -** Fossil configuration management +** Fossil configuration management ** system. ^The SQLITE_SOURCE_ID macro evaluates to ** a string which identifies a particular check-in of SQLite ** within its configuration management system. ^The SQLITE_SOURCE_ID @@ -466,9 +466,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.3" -#define SQLITE_VERSION_NUMBER 3050003 -#define SQLITE_SOURCE_ID "2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543" +#define SQLITE_VERSION "3.49.1" +#define SQLITE_VERSION_NUMBER 3049001 +#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -1483,12 +1483,6 @@ struct sqlite3_io_methods { ** the value that M is to be set to. Before returning, the 32-bit signed ** integer is overwritten with the previous value of M. ** -**
  • [[SQLITE_FCNTL_BLOCK_ON_CONNECT]] -** The [SQLITE_FCNTL_BLOCK_ON_CONNECT] opcode is used to configure the -** VFS to block when taking a SHARED lock to connect to a wal mode database. -** This is used to implement the functionality associated with -** SQLITE_SETLK_BLOCK_ON_CONNECT. -** **
  • [[SQLITE_FCNTL_DATA_VERSION]] ** The [SQLITE_FCNTL_DATA_VERSION] opcode is used to detect changes to ** a database file. The argument is a pointer to a 32-bit unsigned integer. @@ -1585,7 +1579,6 @@ struct sqlite3_io_methods { #define SQLITE_FCNTL_CKSM_FILE 41 #define SQLITE_FCNTL_RESET_CACHE 42 #define SQLITE_FCNTL_NULL_IO 43 -#define SQLITE_FCNTL_BLOCK_ON_CONNECT 44 /* deprecated names */ #define SQLITE_GET_LOCKPROXYFILE SQLITE_FCNTL_GET_LOCKPROXYFILE @@ -2316,16 +2309,13 @@ struct sqlite3_mem_methods { ** ** [[SQLITE_CONFIG_LOOKASIDE]]
    SQLITE_CONFIG_LOOKASIDE
    **
    ^(The SQLITE_CONFIG_LOOKASIDE option takes two arguments that determine -** the default size of [lookaside memory] on each [database connection]. +** the default size of lookaside memory on each [database connection]. ** The first argument is the -** size of each lookaside buffer slot ("sz") and the second is the number of -** slots allocated to each database connection ("cnt").)^ -** ^(SQLITE_CONFIG_LOOKASIDE sets the default lookaside size. -** The [SQLITE_DBCONFIG_LOOKASIDE] option to [sqlite3_db_config()] can -** be used to change the lookaside configuration on individual connections.)^ -** The [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to change the -** default lookaside configuration at compile-time. -**
    +** size of each lookaside buffer slot and the second is the number of +** slots allocated to each database connection.)^ ^(SQLITE_CONFIG_LOOKASIDE +** sets the default lookaside size. The [SQLITE_DBCONFIG_LOOKASIDE] +** option to [sqlite3_db_config()] can be used to change the lookaside +** configuration on individual connections.)^ ** ** [[SQLITE_CONFIG_PCACHE2]]
    SQLITE_CONFIG_PCACHE2
    **
    ^(The SQLITE_CONFIG_PCACHE2 option takes a single argument which is @@ -2562,50 +2552,31 @@ struct sqlite3_mem_methods { ** [[SQLITE_DBCONFIG_LOOKASIDE]] **
    SQLITE_DBCONFIG_LOOKASIDE
    **
    The SQLITE_DBCONFIG_LOOKASIDE option is used to adjust the -** configuration of the [lookaside memory allocator] within a database +** configuration of the lookaside memory allocator within a database ** connection. ** The arguments to the SQLITE_DBCONFIG_LOOKASIDE option are not ** in the [DBCONFIG arguments|usual format]. ** The SQLITE_DBCONFIG_LOOKASIDE option takes three arguments, not two, ** so that a call to [sqlite3_db_config()] that uses SQLITE_DBCONFIG_LOOKASIDE ** should have a total of five parameters. -**
      -**
    1. The first argument ("buf") is a +** ^The first argument (the third parameter to [sqlite3_db_config()] is a ** pointer to a memory buffer to use for lookaside memory. -** The first argument may be NULL in which case SQLite will allocate the -** lookaside buffer itself using [sqlite3_malloc()]. -**

    2. The second argument ("sz") is the -** size of each lookaside buffer slot. Lookaside is disabled if "sz" -** is less than 8. The "sz" argument should be a multiple of 8 less than -** 65536. If "sz" does not meet this constraint, it is reduced in size until -** it does. -**

    3. The third argument ("cnt") is the number of slots. Lookaside is disabled -** if "cnt"is less than 1. The "cnt" value will be reduced, if necessary, so -** that the product of "sz" and "cnt" does not exceed 2,147,418,112. The "cnt" -** parameter is usually chosen so that the product of "sz" and "cnt" is less -** than 1,000,000. -**

    -**

    If the "buf" argument is not NULL, then it must -** point to a memory buffer with a size that is greater than -** or equal to the product of "sz" and "cnt". -** The buffer must be aligned to an 8-byte boundary. -** The lookaside memory +** ^The first argument after the SQLITE_DBCONFIG_LOOKASIDE verb +** may be NULL in which case SQLite will allocate the +** lookaside buffer itself using [sqlite3_malloc()]. ^The second argument is the +** size of each lookaside buffer slot. ^The third argument is the number of +** slots. The size of the buffer in the first argument must be greater than +** or equal to the product of the second and third arguments. The buffer +** must be aligned to an 8-byte boundary. ^If the second argument to +** SQLITE_DBCONFIG_LOOKASIDE is not a multiple of 8, it is internally +** rounded down to the next smaller multiple of 8. ^(The lookaside memory ** configuration for a database connection can only be changed when that ** connection is not currently using lookaside memory, or in other words -** when the value returned by [SQLITE_DBSTATUS_LOOKASIDE_USED] is zero. +** when the "current value" returned by +** [sqlite3_db_status](D,[SQLITE_DBSTATUS_LOOKASIDE_USED],...) is zero. ** Any attempt to change the lookaside memory configuration when lookaside ** memory is in use leaves the configuration unchanged and returns -** [SQLITE_BUSY]. -** If the "buf" argument is NULL and an attempt -** to allocate memory based on "sz" and "cnt" fails, then -** lookaside is silently disabled. -**

    -** The [SQLITE_CONFIG_LOOKASIDE] configuration option can be used to set the -** default lookaside configuration at initialization. The -** [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to set the default lookaside -** configuration at compile-time. Typical values for lookaside are 1200 for -** "sz" and 40 to 100 for "cnt". -**

    +** [SQLITE_BUSY].)^ ** ** [[SQLITE_DBCONFIG_ENABLE_FKEY]] **
    SQLITE_DBCONFIG_ENABLE_FKEY
    @@ -3342,44 +3313,6 @@ SQLITE_API int sqlite3_busy_handler(sqlite3*,int(*)(void*,int),void*); */ SQLITE_API int sqlite3_busy_timeout(sqlite3*, int ms); -/* -** CAPI3REF: Set the Setlk Timeout -** METHOD: sqlite3 -** -** This routine is only useful in SQLITE_ENABLE_SETLK_TIMEOUT builds. If -** the VFS supports blocking locks, it sets the timeout in ms used by -** eligible locks taken on wal mode databases by the specified database -** handle. In non-SQLITE_ENABLE_SETLK_TIMEOUT builds, or if the VFS does -** not support blocking locks, this function is a no-op. -** -** Passing 0 to this function disables blocking locks altogether. Passing -** -1 to this function requests that the VFS blocks for a long time - -** indefinitely if possible. The results of passing any other negative value -** are undefined. -** -** Internally, each SQLite database handle store two timeout values - the -** busy-timeout (used for rollback mode databases, or if the VFS does not -** support blocking locks) and the setlk-timeout (used for blocking locks -** on wal-mode databases). The sqlite3_busy_timeout() method sets both -** values, this function sets only the setlk-timeout value. Therefore, -** to configure separate busy-timeout and setlk-timeout values for a single -** database handle, call sqlite3_busy_timeout() followed by this function. -** -** Whenever the number of connections to a wal mode database falls from -** 1 to 0, the last connection takes an exclusive lock on the database, -** then checkpoints and deletes the wal file. While it is doing this, any -** new connection that tries to read from the database fails with an -** SQLITE_BUSY error. Or, if the SQLITE_SETLK_BLOCK_ON_CONNECT flag is -** passed to this API, the new connection blocks until the exclusive lock -** has been released. -*/ -SQLITE_API int sqlite3_setlk_timeout(sqlite3*, int ms, int flags); - -/* -** CAPI3REF: Flags for sqlite3_setlk_timeout() -*/ -#define SQLITE_SETLK_BLOCK_ON_CONNECT 0x01 - /* ** CAPI3REF: Convenience Routines For Running Queries ** METHOD: sqlite3 @@ -4399,7 +4332,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** an array P of N URI Key/Value pairs. The result from +** with N URI parameters key/values pairs in the array P. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
@@ -5080,7 +5013,7 @@ typedef struct sqlite3_context sqlite3_context;
** METHOD: sqlite3_stmt
**
** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants,
-** literals may be replaced by a [parameter] that matches one of the following
+** literals may be replaced by a [parameter] that matches one of the following
** templates:
**
**
@@ -5125,7 +5058,7 @@ typedef struct sqlite3_context sqlite3_context;
**
** [[byte-order determination rules]] ^The byte-order of
** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF)
-** found in the first character, which is removed, or in the absence of a BOM
+** found in the first character, which is removed, or in the absence of a BOM
** the byte order is the native byte order of the host
** machine for sqlite3_bind_text16() or the byte order specified in
** the 6th parameter for sqlite3_bind_text64().)^
@@ -5145,7 +5078,7 @@ typedef struct sqlite3_context sqlite3_context;
** or sqlite3_bind_text16() or sqlite3_bind_text64() then
** that parameter must be the byte offset
** where the NUL terminator would occur assuming the string were NUL
-** terminated. If any NUL characters occur at byte offsets less than
+** terminated. If any NUL characters occur at byte offsets less than
** the value of the fourth parameter then the resulting string value will
** contain embedded NULs. The result of expressions involving strings
** with embedded NULs is undefined.
@@ -5357,7 +5290,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N);
** METHOD: sqlite3_stmt
**
** ^These routines provide a means to determine the database, table, and
-** table column that is the origin of a particular result column in a
+** table column that is the origin of a particular result column in a
** [SELECT] statement.
** ^The name of the database or table or column can be returned as
** either a UTF-8 or UTF-16 string. ^The _database_ routines return
@@ -5495,7 +5428,7 @@ SQLITE_API const void *sqlite3_column_decltype16(sqlite3_stmt*,int);
** other than [SQLITE_ROW] before any subsequent invocation of
** sqlite3_step(). Failure to reset the prepared statement using
** [sqlite3_reset()] would result in an [SQLITE_MISUSE] return from
-** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1]),
+** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1]),
** sqlite3_step() began
** calling [sqlite3_reset()] automatically in this circumstance rather
** than returning [SQLITE_MISUSE]. This is not considered a compatibility
@@ -5926,8 +5859,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt);
**
** For best security, the [SQLITE_DIRECTONLY] flag is recommended for
** all application-defined SQL functions that do not need to be
-** used inside of triggers, views, CHECK constraints, or other elements of
-** the database schema. This flag is especially recommended for SQL
+** used inside of triggers, views, CHECK constraints, or other elements of
+** the database schema. This flag is especially recommended for SQL
** functions that have side effects or reveal internal application state.
** Without this flag, an attacker might be able to modify the schema of
** a database file to include invocations of the function with parameters
@@ -5958,7 +5891,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt);
** [user-defined window functions|available here].
**
** ^(If the final parameter to sqlite3_create_function_v2() or
-** sqlite3_create_window_function() is not NULL, then it is the destructor for
+** sqlite3_create_window_function() is not NULL, then it is the destructor for
the application data pointer. The destructor is invoked when the function
** is deleted, either by being overloaded or when the database connection
** closes.)^ ^The destructor is also invoked if the call to
@@ -6358,7 +6291,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*);
** METHOD: sqlite3_value
**
** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value]
-** object V and returns a pointer to that copy. ^The [sqlite3_value] returned
+** object V and returns a pointer to that copy. ^The [sqlite3_value] returned
** is a [protected sqlite3_value] object even if the input is not.
** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a
** memory allocation fails. ^If V is a [pointer value], then the result
@@ -6396,7 +6329,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*);
** allocation error occurs.
**
** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is
-** determined by the N parameter on the first successful call. Changing the
+** determined by the N parameter on the first successful call. Changing the
** value of N in any subsequent call to sqlite3_aggregate_context() within
** the same aggregate function instance will not resize the memory
** allocation.)^ Within the xFinal callback, it is customary to set
@@ -6558,7 +6491,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi
**
** Security Warning: These interfaces should not be exposed in scripting
** languages or in other circumstances where it might be possible for an
-** attacker to invoke them. Any agent that can invoke these interfaces
+** attacker to invoke them. Any agent that can invoke these interfaces
** can probably also take control of the process.
**
** Database connection client data is only available for SQLite
@@ -6672,7 +6605,7 @@ typedef void (*sqlite3_destructor_type)(void*);
** pointed to by the 2nd parameter are taken as the application-defined
** function result. If the 3rd parameter is non-negative, then it
** must be the byte offset into the string where the NUL terminator would
-** appear if the string were NUL terminated. If any NUL characters occur
+** appear if the string were NUL terminated. If any NUL characters occur
** in the string at a byte offset that is less than the value of the 3rd
** parameter, then the resulting string will contain embedded NULs and the
** result of expressions operating on strings with embedded NULs is undefined.
@@ -6730,7 +6663,7 @@ typedef void (*sqlite3_destructor_type)(void*);
** string and preferably a string literal. The sqlite3_result_pointer()
** routine is part of the [pointer passing interface] added for SQLite 3.20.0.
**
-** If these routines are called from within a different thread
+** If these routines are called from within a different thread
** than the one containing the application-defined function that received
** the [sqlite3_context] pointer, the results are undefined.
*/
@@ -7136,7 +7069,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*);
** METHOD: sqlite3
**
** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name
-** for the N-th database on database connection D, or a NULL pointer if N is
+** for the N-th database on database connection D, or a NULL pointer if N is
** out of range. An N value of 0 means the main database file. An N of 1 is
** the "temp" schema. Larger values of N correspond to various ATTACH-ed
** databases.
@@ -7231,7 +7164,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema);
**
The SQLITE_TXN_READ state means that the database is currently
** in a read transaction. Content has been read from the database file
** but nothing in the database file has changed. The transaction state
-** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are
+** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are
** no other conflicting concurrent write transactions. The transaction
** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or
** [COMMIT].
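As an aside for reviewers, the state progression described above can be exercised from Go through this vendored driver. A minimal sketch, not part of the patch: sqlite3_txn_state() itself is not exposed via database/sql, so the comments only map each step to the state the C API would report.

package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", "file:demo.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS t(x INTEGER)`); err != nil {
		log.Fatal(err)
	}

	tx, err := db.Begin() // SQLITE_TXN_NONE until the first statement runs
	if err != nil {
		log.Fatal(err)
	}
	var n int
	// A read moves the connection to SQLITE_TXN_READ.
	if err := tx.QueryRow(`SELECT count(*) FROM t`).Scan(&n); err != nil {
		log.Fatal(err)
	}
	// A write advances it to SQLITE_TXN_WRITE.
	if _, err := tx.Exec(`INSERT INTO t(x) VALUES (?)`, n); err != nil {
		log.Fatal(err)
	}
	// COMMIT (or ROLLBACK) returns it to SQLITE_TXN_NONE.
	if err := tx.Commit(); err != nil {
		log.Fatal(err)
	}
}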
        @@ -7240,7 +7173,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
        The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
+** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
*/
#define SQLITE_TXN_NONE 0
#define SQLITE_TXN_READ 1
@@ -7391,8 +7324,6 @@ SQLITE_API int sqlite3_autovacuum_pages(
**
** ^The second argument is a pointer to the function to invoke when a
** row is updated, inserted or deleted in a rowid table.
-** ^The update hook is disabled by invoking sqlite3_update_hook()
-** with a NULL pointer as the second parameter.
** ^The first argument to the callback is a copy of the third argument
** to sqlite3_update_hook().
** ^The second callback argument is one of [SQLITE_INSERT], [SQLITE_DELETE],
@@ -7521,7 +7452,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*);
** CAPI3REF: Impose A Limit On Heap Size
**
** These interfaces impose limits on the amount of heap memory that will be
-** used by all database connections within a single process.
+** used by all database connections within a single process.
**
** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the
** soft limit on the amount of heap memory that may be allocated by SQLite.
@@ -7579,7 +7510,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*);
**
)^
**
** The circumstances under which SQLite will enforce the heap limits may
-** change in future releases of SQLite.
+** change in future releases of SQLite.
*/
SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N);
SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N);
@@ -7694,8 +7625,8 @@ SQLITE_API int sqlite3_table_column_metadata(
** ^The entry point is zProc.
** ^(zProc may be 0, in which case SQLite will try to come up with an
** entry point name on its own. It first tries "sqlite3_extension_init".
-** If that does not work, it constructs a name "sqlite3_X_init" where
-** X consists of the lower-case equivalent of all ASCII alphabetic
+** If that does not work, it constructs a name "sqlite3_X_init" where the
+** X consists of the lower-case equivalent of all ASCII alphabetic
** characters in the filename from the last "/" to the first following
** "." and omitting any initial "lib".)^
** ^The sqlite3_load_extension() interface returns
@@ -7766,7 +7697,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff);
** ^(Even though the function prototype shows that xEntryPoint() takes
** no arguments and returns void, SQLite invokes xEntryPoint() with three
** arguments and expects an integer result as if the signature of the
-** entry point were as follows:
+** entry point were as follows:
**
**
       **    int xEntryPoint(
      @@ -7930,7 +7861,7 @@ struct sqlite3_module {
       ** virtual table and might not be checked again by the byte code.)^ ^(The
       ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
       ** is left in its default setting of false, the constraint will always be
      -** checked separately in byte code.  If the omit flag is changed to true, then
+** checked separately in byte code.  If the omit flag is changed to true, then
       ** the constraint may or may not be checked in byte code.  In other words,
       ** when the omit flag is true there is no guarantee that the constraint will
       ** not be checked again using byte code.)^
      @@ -7956,7 +7887,7 @@ struct sqlite3_module {
       ** The xBestIndex method may optionally populate the idxFlags field with a
       ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
       ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
      -** output to show the idxNum as hex instead of as decimal.  Another flag is
+** output to show the idxNum as hex instead of as decimal.  Another flag is
       ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
       ** return at most one row.
       **
      @@ -8097,7 +8028,7 @@ struct sqlite3_index_info {
       ** the implementation of the [virtual table module].   ^The fourth
       ** parameter is an arbitrary client data pointer that is passed through
       ** into the [xCreate] and [xConnect] methods of the virtual table module
      -** when a new virtual table is being created or reinitialized.
+** when a new virtual table is being created or reinitialized.
       **
       ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
       ** is a pointer to a destructor for the pClientData.  ^SQLite will
      @@ -8262,7 +8193,7 @@ typedef struct sqlite3_blob sqlite3_blob;
       ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
       ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
       ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
      -** on *ppBlob after this function returns.
+** on *ppBlob after this function returns.
       **
       ** This function fails with SQLITE_ERROR if any of the following are true:
       ** 
@@ -8382,7 +8313,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *);
**
** ^Returns the size in bytes of the BLOB accessible via the
** successfully opened [BLOB handle] in its only argument. ^The
-** incremental blob I/O routines can only read or overwrite existing
+** incremental blob I/O routines can only read or overwrite existing
** blob content; they cannot change the size of a blob.
**
** This routine only works on a [BLOB handle] which has been created
@@ -8532,7 +8463,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*);
** ^The sqlite3_mutex_alloc() routine allocates a new
** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc()
** routine returns NULL if it is unable to allocate the requested
-** mutex. The argument to sqlite3_mutex_alloc() must be one of these
+** mutex. The argument to sqlite3_mutex_alloc() must be one of these
** integer constants:
**
**
@@ -8765,7 +8696,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*);
** CAPI3REF: Retrieve the mutex for a database connection
** METHOD: sqlite3
**
-** ^This interface returns a pointer to the [sqlite3_mutex] object that
+** ^This interface returns a pointer to the [sqlite3_mutex] object that
** serializes access to the [database connection] given in the argument
** when the [threading mode] is Serialized.
** ^If the [threading mode] is Single-thread or Multi-thread then this
@@ -8888,7 +8819,7 @@ SQLITE_API int sqlite3_test_control(int op, ...);
** CAPI3REF: SQL Keyword Checking
**
** These routines provide access to the set of SQL language keywords
-** recognized by SQLite. Applications can use these routines to determine
+** recognized by SQLite. Applications can use these routines to determine
** whether or not a specific identifier needs to be escaped (for example,
** by enclosing in double-quotes) so as not to confuse the parser.
**
@@ -9056,7 +8987,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*);
** content of the dynamic string under construction in X. The value
** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X
** and might be freed or altered by any subsequent method on the same
-** [sqlite3_str] object. Applications must not use the pointer returned by
+** [sqlite3_str] object. Applications must not use the pointer returned by
** [sqlite3_str_value(X)] after any subsequent method call on the same
** object. ^Applications may change the content of the string returned
** by [sqlite3_str_value(X)] as long as they do not write into any bytes
@@ -9142,7 +9073,7 @@ SQLITE_API int sqlite3_status64(
** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE]
** buffer and where forced to overflow to [sqlite3_malloc()]. The
** returned value includes allocations that overflowed because they
-** were too large (they were larger than the "sz" parameter to
+** were too large (they were larger than the "sz" parameter to
** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because
** no space was left in the page cache.)^
**
@@ -9226,29 +9157,28 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r
** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
          SQLITE_DBSTATUS_LOOKASIDE_HIT
          **
          This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.
          )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
          SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
          -**
          This parameter returns the number of malloc attempts that might have +**
This parameter returns the number of malloc attempts that might have
** been satisfied using lookaside memory but failed due to the amount of
** memory requested being larger than the lookaside slot size.
** Only the high-water value is meaningful;
-** the current value is always zero.
          )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
          SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
          -**
          This parameter returns the number of malloc attempts that might have +**
This parameter returns the number of malloc attempts that might have
** been satisfied using lookaside memory but failed due to all lookaside
** memory already being in use.
** Only the high-water value is meaningful;
-** the current value is always zero.
          )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
          SQLITE_DBSTATUS_CACHE_USED
          **
          This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. -**
          ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
          SQLITE_DBSTATUS_CACHE_USED_SHARED
@@ -9257,10 +9187,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r
** memory used by that pager cache is divided evenly between the attached
** connections.)^ In other words, if none of the pager caches associated
** with the database connection are shared, this request returns the same
-** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are
+** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are
** shared, the value returned by this call will be smaller than that returned
** by DBSTATUS_CACHE_USED. ^The highwater mark associated with
-** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0.
+** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0.
**
** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
          SQLITE_DBSTATUS_SCHEMA_USED
          **
          This parameter returns the approximate number of bytes of heap @@ -9270,7 +9200,6 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. -**
          ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
          SQLITE_DBSTATUS_STMT_USED
          **
This parameter returns the approximate number of bytes of heap
@@ -9307,7 +9236,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r
** been written to disk in the middle of a transaction due to the page
** cache overflowing. Transactions are more efficient if they are written
** to disk all at once. When pages spill mid-transaction, that introduces
-** additional overhead. This parameter can be used to help identify
+** additional overhead. This parameter can be used to help identify
** inefficiencies that can be resolved by increasing the cache size.
**
          ** @@ -9378,13 +9307,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
          SQLITE_STMTSTATUS_SORT
          **
          ^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improve performance through careful use of indices.
+** improve performance through careful use of indices.
**
** [[SQLITE_STMTSTATUS_AUTOINDEX]]
          SQLITE_STMTSTATUS_AUTOINDEX
          **
^This is the number of rows inserted into transient indices that
** were created automatically in order to help joins run faster.
** A non-zero value in this counter may indicate an opportunity to
-** improve performance by adding permanent indices that do not
+** improve performance by adding permanent indices that do not
** need to be reinitialized each time the statement is run.
          ** ** [[SQLITE_STMTSTATUS_VM_STEP]]
          SQLITE_STMTSTATUS_VM_STEP
          @@ -9393,19 +9322,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
          SQLITE_STMTSTATUS_REPREPARE
          **
          ^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan.
          +** [bound parameters] that might affect the query plan. ** ** [[SQLITE_STMTSTATUS_RUN]]
          SQLITE_STMTSTATUS_RUN
          **
          ^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle.
          +** cycle. ** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9415,7 +9344,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
          SQLITE_STMTSTATUS_MEMUSED
          **
^This is the approximate number of bytes of heap
@@ -9520,9 +9449,9 @@ struct sqlite3_pcache_page {
** SQLite will typically create one cache instance for each open database file,
** though this is not guaranteed. ^The
** first parameter, szPage, is the size in bytes of the pages that must
-** be allocated by the cache. ^szPage will always be a power of two. ^The
+** be allocated by the cache. ^szPage will always be a power of two. ^The
** second parameter szExtra is a number of bytes of extra storage
-** associated with each page cache entry. ^The szExtra parameter will be
+** associated with each page cache entry. ^The szExtra parameter will be
** a number less than 250. SQLite will use the
** extra szExtra bytes on each page to store metadata about the underlying
** database page on disk. The value passed into szExtra depends
@@ -9530,17 +9459,17 @@ struct sqlite3_pcache_page {
** ^The third argument to xCreate(), bPurgeable, is true if the cache being
** created will be used to cache database pages of a file stored on disk, or
** false if it is used for an in-memory database. The cache implementation
-** does not have to do anything special based upon the value of bPurgeable;
+** does not have to do anything special based upon the value of bPurgeable;
** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will
** never invoke xUnpin() except to deliberately delete a page.
** ^In other words, calls to xUnpin() on a cache with bPurgeable set to
** false will always have the "discard" flag set to true.
-** ^Hence, a cache created with bPurgeable set to false will
+** ^Hence, a cache created with bPurgeable set to false will
** never contain any unpinned pages.
**
** [[the xCachesize() page cache method]]
** ^(The xCachesize() method may be called at any time by SQLite to set the
-** suggested maximum cache-size (number of pages stored) for the cache
+** suggested maximum cache-size (number of pages stored) for the cache
** instance passed as the first argument. This is the value configured using
** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable
** parameter, the implementation is not required to do anything with this
@@ -9567,12 +9496,12 @@ struct sqlite3_pcache_page {
** implementation must return a pointer to the page buffer with its content
** intact. If the requested page is not already in the cache, then the
** cache implementation should use the value of the createFlag
-** parameter to help it determine what action to take:
+** parameter to help it determine what action to take:
**
**
**
          createFlag Behavior when page is not already in cache **
          0 Do not allocate a new page. Return NULL. -**
          1 Allocate a new page if it is easy and convenient to do so. +**
1 Allocate a new page if it is easy and convenient to do so.
** Otherwise return NULL.
**
2 Make every effort to allocate a new page. Only return
** NULL if allocating a new page is effectively impossible.
@@ -9589,7 +9518,7 @@ struct sqlite3_pcache_page {
** as its second argument. If the third parameter, discard, is non-zero,
** then the page must be evicted from the cache.
** ^If the discard parameter is
-** zero, then the page may be discarded or retained at the discretion of the
+** zero, then the page may be discarded or retained at the discretion of the
** page cache implementation. ^The page cache implementation
** may choose to evict unpinned pages at any time.
**
@@ -9607,7 +9536,7 @@ struct sqlite3_pcache_page {
** When SQLite calls the xTruncate() method, the cache must discard all
** existing cache entries with page numbers (keys) greater than or equal
** to the value of the iLimit parameter passed to xTruncate(). If any
-** of these pages are pinned, they become implicitly unpinned, meaning that
+** of these pages are pinned, they are implicitly unpinned, meaning that
** they can be safely discarded.
**
** [[the xDestroy() page cache method]]
@@ -9787,7 +9716,7 @@ typedef struct sqlite3_backup sqlite3_backup;
** external process or via a database connection other than the one being
** used by the backup operation, then the backup will be automatically
** restarted by the next call to sqlite3_backup_step(). ^If the source
-** database is modified by using the same database connection as is used
+** database is modified by using the same database connection as is used
** by the backup operation, then the backup database is automatically
** updated at the same time.
**
@@ -9804,7 +9733,7 @@ typedef struct sqlite3_backup sqlite3_backup;
** and may not be used following a call to sqlite3_backup_finish().
**
** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no
-** sqlite3_backup_step() errors occurred, regardless of whether or not
+** sqlite3_backup_step() errors occurred, regardless of whether or not
** sqlite3_backup_step() completed.
** ^If an out-of-memory condition or IO error occurred during any prior
** sqlite3_backup_step() call on the same [sqlite3_backup] object, then
@@ -9906,7 +9835,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p);
** application receives an SQLITE_LOCKED error, it may call the
** sqlite3_unlock_notify() method with the blocked connection handle as
** the first argument to register for a callback that will be invoked
-** when the blocking connection's current transaction is concluded. ^The
+** when the blocking connection's current transaction is concluded. ^The
** callback is invoked from within the [sqlite3_step] or [sqlite3_close]
** call that concludes the blocking connection's transaction.
**
@@ -9926,7 +9855,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p);
** blocked connection already has a registered unlock-notify callback,
** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is
** called with a NULL pointer as its second argument, then any existing
-** unlock-notify callback is canceled. ^The blocked connection's
+** unlock-notify callback is canceled. ^The blocked connection's
** unlock-notify callback may also be canceled by closing the blocked
** connection using [sqlite3_close()].
**
@@ -10324,7 +10253,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...);
** support constraints.
In this configuration (which is the default) if
** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire
** statement is rolled back as if [ON CONFLICT | OR ABORT] had been
-** specified as part of the user's SQL statement, regardless of the actual
+** specified as part of the user's SQL statement, regardless of the actual
** ON CONFLICT mode specified.
**
** If X is non-zero, then the virtual table implementation guarantees
@@ -10358,7 +10287,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...);
** [[SQLITE_VTAB_INNOCUOUS]]
          SQLITE_VTAB_INNOCUOUS
          **
Calls of the form
** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the
-** [xConnect] or [xCreate] methods of a [virtual table] implementation
+** [xConnect] or [xCreate] methods of a [virtual table] implementation
** identify that virtual table as being safe to use from within triggers
** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the
** virtual table can do no serious harm even if it is controlled by a
@@ -10526,7 +10455,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int);
**
**
** ^For the purposes of comparing virtual table output values to see if the
-** values are the same value for sorting purposes, two NULL values are considered
+** values are the same value for sorting purposes, two NULL values are considered
** to be the same. In other words, the comparison operator is "IS"
** (or "IS NOT DISTINCT FROM") and not "==".
**
@@ -10536,7 +10465,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int);
**
** ^A virtual table implementation is always free to return rows in any order
** it wants, as long as the "orderByConsumed" flag is not set. ^When the
-** "orderByConsumed" flag is unset, the query planner will add extra
+** "orderByConsumed" flag is unset, the query planner will add extra
** [bytecode] to ensure that the final results returned by the SQL query are
** ordered correctly. The use of the "orderByConsumed" flag and the
** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful
@@ -10633,7 +10562,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle);
** sqlite3_vtab_in_next(X,P) should be one of the parameters to the
** xFilter method which invokes these routines, and specifically
** a parameter that was previously selected for all-at-once IN constraint
-** processing using the [sqlite3_vtab_in()] interface in the
+** processing using the [sqlite3_vtab_in()] interface in the
** [xBestIndex|xBestIndex method]. ^(If the X parameter is not
** an xFilter argument that was selected for all-at-once IN constraint
** processing, then these routines return [SQLITE_ERROR].)^
@@ -10688,7 +10617,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut);
** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V)
** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th
** constraint is not available. ^The sqlite3_vtab_rhs_value() interface
-** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if
+** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if
** something goes wrong.
**
** The sqlite3_vtab_rhs_value() interface is usually only successful if
@@ -10716,8 +10645,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value **
** KEYWORDS: {conflict resolution mode}
**
** These constants are returned by [sqlite3_vtab_on_conflict()] to
-** inform a [virtual table] implementation of the [ON CONFLICT] mode
-** for the SQL statement being evaluated.
+** inform a [virtual table] implementation what the [ON CONFLICT] mode
+** is for the SQL statement being evaluated.
**
** Note that the [SQLITE_IGNORE] constant is also used as a potential
** return value from the [sqlite3_set_authorizer()] callback and that
@@ -10757,39 +10686,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value **
**
** [[SQLITE_SCANSTAT_EST]]
          SQLITE_SCANSTAT_EST
          **
          ^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimate was accurate, +** iteration of the X-th loop. If the query planner's estimates was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop.
          +** be the NLOOP value for the current loop. ** ** [[SQLITE_SCANSTAT_NAME]]
          SQLITE_SCANSTAT_NAME
          **
          ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop.
          +** used for the X-th loop. ** ** [[SQLITE_SCANSTAT_EXPLAIN]]
          SQLITE_SCANSTAT_EXPLAIN
          **
          ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop.
          +** description for the X-th loop. ** ** [[SQLITE_SCANSTAT_SELECTID]]
          SQLITE_SCANSTAT_SELECTID
          **
          ^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query.
          +** column of an [EXPLAIN QUERY PLAN] query. ** ** [[SQLITE_SCANSTAT_PARENTID]]
          SQLITE_SCANSTAT_PARENTID
          **
          The "int" variable pointed to by the V parameter will be set to the -** id of the parent of the current query element, if applicable, or +** the id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query.
          +** returned in the second column of an [EXPLAIN QUERY PLAN] query. ** ** [[SQLITE_SCANSTAT_NCYCLE]]
          SQLITE_SCANSTAT_NCYCLE
          **
          The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1.
+** set to -1.
**
*/
#define SQLITE_SCANSTAT_NLOOP 0
@@ -10830,8 +10759,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value **
** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter.
**
** Parameter "idx" identifies the specific query element to retrieve statistics
-** for. Query elements are numbered starting from zero. A value of -1 may
-** retrieve statistics for the entire query. ^If idx is out of range
+** for. Query elements are numbered starting from zero. A value of -1 may be
+** used to query for statistics regarding the entire query. ^If idx is out of range
** - less than -1 or greater than or equal to the total number of query
** elements used to implement the statement - a non-zero value is returned and
** the variable that pOut points to is unchanged.
@@ -10874,7 +10803,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*);
** METHOD: sqlite3
**
** ^If a write-transaction is open on [database connection] D when the
-** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty
+** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty
** pages in the pager-cache that are not currently in use are written out
** to disk. A dirty page may be in use if a database cursor created by an
** active SQL statement is reading from it, or if it is page 1 of a database
@@ -10988,8 +10917,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*);
** triggers; and so forth.
**
** When the [sqlite3_blob_write()] API is used to update a blob column,
-** the pre-update hook is invoked with SQLITE_DELETE, because
-** the new values are not yet available. In this case, when a
+** the pre-update hook is invoked with SQLITE_DELETE. This is because
+** in this case the new values are not available. In this case, when a
** callback made with op==SQLITE_DELETE is actually a write using the
** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns
** the index of the column being written. In other cases, where the
@@ -11242,7 +11171,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c
** For an ordinary on-disk database file, the serialization is just a
** copy of the disk file. For an in-memory database or a "TEMP" database,
** the serialization is the same sequence of bytes which would be written
-** to disk if that database were backed up to disk.
+** to disk if that database were backed up to disk.
**
** The usual case is that sqlite3_serialize() copies the serialization of
** the database into memory obtained from [sqlite3_malloc64()] and returns
@@ -11251,7 +11180,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c
** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations
** are made, and the sqlite3_serialize() function will return a pointer
** to the contiguous memory representation of the database that SQLite
-** is currently using for that database, or NULL if no such contiguous
+** is currently using for that database, or NULL if no such contiguous
** memory representation of the database exists. A contiguous memory
** representation of the database will usually only exist if there has
** been a prior call to [sqlite3_deserialize(D,S,...)] with the same
@@ -11322,7 +11251,7 @@ SQLITE_API unsigned char *sqlite3_serialize(
** database is currently in a read transaction or is involved in a backup
** operation.
**
-** It is not possible to deserialize into the TEMP database. If the
+** It is not possible to deserialize into the TEMP database.
If the
** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the
** function returns SQLITE_ERROR.
**
@@ -11344,7 +11273,7 @@ SQLITE_API int sqlite3_deserialize(
  sqlite3 *db, /* The database connection */
  const char *zSchema, /* Which DB to reopen with the deserialization */
  unsigned char *pData, /* The serialized database content */
- sqlite3_int64 szDb, /* Number of bytes in the deserialization */
+ sqlite3_int64 szDb, /* Number of bytes in the deserialization */
  sqlite3_int64 szBuf, /* Total size of buffer pData[] */
  unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */
);
@@ -11352,7 +11281,7 @@ SQLITE_API int sqlite3_deserialize(
/*
** CAPI3REF: Flags for sqlite3_deserialize()
**
-** The following are allowed values for the 6th argument (the F argument) to
+** The following are allowed values for the 6th argument (the F argument) to
** the [sqlite3_deserialize(D,S,P,N,M,F)] interface.
**
** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization
@@ -11877,10 +11806,9 @@ SQLITE_API void sqlite3session_table_filter(
** is inserted while a session object is enabled, then later deleted while
** the same session object is disabled, no INSERT record will appear in the
** changeset, even though the delete took place while the session was disabled.
-** Or, if one field of a row is updated while a session is enabled, and
-** then another field of the same row is updated while the session is disabled,
-** the resulting changeset will contain an UPDATE change that updates both
-** fields.
+** Or, if one field of a row is updated while a session is disabled, and
+** another field of the same row is updated while the session is enabled, the
+** resulting changeset will contain an UPDATE change that updates both fields.
*/
SQLITE_API int sqlite3session_changeset(
  sqlite3_session *pSession, /* Session object */
@@ -11952,9 +11880,8 @@ SQLITE_API sqlite3_int64 sqlite3session_changeset_size(sqlite3_session *pSession
** database zFrom the contents of the two compatible tables would be
** identical.
**
-** Unless the call to this function is a no-op as described above, it is an
-** error if database zFrom does not exist or does not contain the required
-** compatible table.
+** It is an error if database zFrom does not exist or does not contain the
+** required compatible table.
**
** If the operation is successful, SQLITE_OK is returned. Otherwise, an SQLite
** error code. In this case, if argument pzErrMsg is not NULL, *pzErrMsg
@@ -12089,7 +12016,7 @@ SQLITE_API int sqlite3changeset_start_v2(
** The following flags may be passed via the 4th parameter to
** [sqlite3changeset_start_v2] and [sqlite3changeset_start_v2_strm]:
**
-**
          SQLITE_CHANGESETSTART_INVERT
          +**
SQLITE_CHANGESETSTART_INVERT
** Invert the changeset while iterating through it. This is equivalent to
** inverting a changeset using sqlite3changeset_invert() before applying it.
** It is an error to specify this flag with a patchset.
@@ -12404,6 +12331,19 @@ SQLITE_API int sqlite3changeset_concat(
  void **ppOut /* OUT: Buffer containing output changeset */
);
+
+/*
+** CAPI3REF: Upgrade the Schema of a Changeset/Patchset
+*/
+SQLITE_API int sqlite3changeset_upgrade(
+  sqlite3 *db,
+  const char *zDb,
+  int nIn, const void *pIn, /* Input changeset */
+  int *pnOut, void **ppOut /* OUT: Inverse of input */
+);
+
+
+
/*
** CAPI3REF: Changegroup Handle
**
@@ -14151,22 +14091,14 @@ struct fts5_api {
** * Terms in the GROUP BY or ORDER BY clauses of a SELECT statement.
** * Terms in the VALUES clause of an INSERT statement
**
-** The hard upper limit here is 32767. Most database people will
+** The hard upper limit here is 32767. Most database people will
** tell you that in a well-normalized database, you usually should
** not have more than a dozen or so columns in any table. And if
** that is the case, there is no point in having more than a few
** dozen values in any of the other situations described above.
-**
-** An index can only have SQLITE_MAX_COLUMN columns from the user
-** point of view, but the underlying b-tree that implements the index
-** might have up to twice as many columns in a WITHOUT ROWID table,
-** since must also store the primary key at the end. Hence the
-** column count for Index is u16 instead of i16.
*/
-#if !defined(SQLITE_MAX_COLUMN)
+#ifndef SQLITE_MAX_COLUMN
# define SQLITE_MAX_COLUMN 2000
-#elif SQLITE_MAX_COLUMN>32767
-# error SQLITE_MAX_COLUMN may not exceed 32767
#endif
/*
@@ -14818,7 +14750,6 @@ struct HashElem {
  HashElem *next, *prev; /* Next and previous elements in the table */
  void *data; /* Data associated with this element */
  const char *pKey; /* Key associated with this element */
- unsigned int h; /* hash for pKey */
};
/*
@@ -15179,17 +15110,7 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash*);
** ourselves.
*/
#ifndef offsetof
-#define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD))
-#endif
-
-/*
-** Work around C99 "flex-array" syntax for pre-C99 compilers, so as
-** to avoid complaints from -fsanitize=strict-bounds.
-*/
-#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
-# define FLEXARRAY
-#else
-# define FLEXARRAY 1
+#define offsetof(STRUCTURE,FIELD) ((int)((char*)&((STRUCTURE*)0)->FIELD))
#endif
/*
@@ -15267,11 +15188,6 @@ typedef INT16_TYPE i16; /* 2-byte signed integer */
typedef UINT8_TYPE u8; /* 1-byte unsigned integer */
typedef INT8_TYPE i8; /* 1-byte signed integer */
-/* A bitfield type for use inside of structures. Always follow with :N where
-** N is the number of bits.
-*/
-typedef unsigned bft; /* Bit Field Type */
-
/*
** SQLITE_MAX_U32 is a u64 constant that is the maximum u64 value
** that can be stored in a u32 without loss of data. The value
@@ -15440,14 +15356,6 @@ typedef INT16_TYPE LogEst;
#define LARGEST_UINT64 (0xffffffff|(((u64)0xffffffff)<<32))
#define SMALLEST_INT64 (((i64)-1) - LARGEST_INT64)
-/*
-** Macro SMXV(n) return the maximum value that can be held in variable n,
-** assuming n is a signed integer type. UMXV(n) is similar for unsigned
-** integer types.
-*/
-#define SMXV(n) ((((i64)1)<<(sizeof(n)*8-1))-1)
-#define UMXV(n) ((((i64)1)<<(sizeof(n)*8))-1)
-
/*
** Round up a number to the next larger multiple of 8. This is used
** to force 8-byte alignment on 64-bit architectures.
@@ -17424,8 +17332,8 @@ SQLITE_PRIVATE int sqlite3NotPureFunc(sqlite3_context*);
SQLITE_PRIVATE int sqlite3VdbeBytecodeVtabInit(sqlite3*);
#endif
-/* Use SQLITE_ENABLE_EXPLAIN_COMMENTS to enable generation of extra
-** comments on each VDBE opcode.
+/* Use SQLITE_ENABLE_EXPLAIN_COMMENTS to enable generation of extra comments on
+** each VDBE opcode.
**
** Use the SQLITE_ENABLE_MODULE_COMMENTS macro to see some extra no-op
** comments in VDBE programs that show key decision points in the code
@@ -18148,10 +18056,6 @@ struct sqlite3 {
  Savepoint *pSavepoint; /* List of active savepoints */
  int nAnalysisLimit; /* Number of index rows to ANALYZE */
  int busyTimeout; /* Busy handler timeout, in msec */
-#ifdef SQLITE_ENABLE_SETLK_TIMEOUT
- int setlkTimeout; /* Blocking lock timeout, in msec. -1 -> inf. */
- int setlkFlags; /* Flags passed to setlk_timeout() */
-#endif
  int nSavepoint; /* Number of non-transaction savepoints */
  int nStatement; /* Number of nested statement-transactions */
  i64 nDeferredCons; /* Net deferred constraints this transaction. */
@@ -18706,7 +18610,6 @@ struct CollSeq {
#define SQLITE_AFF_INTEGER 0x44 /* 'D' */
#define SQLITE_AFF_REAL 0x45 /* 'E' */
#define SQLITE_AFF_FLEXNUM 0x46 /* 'F' */
-#define SQLITE_AFF_DEFER 0x58 /* 'X' - defer computation until later */
#define sqlite3IsNumericAffinity(X) ((X)>=SQLITE_AFF_NUMERIC)
@@ -18831,7 +18734,6 @@ struct Table {
  } u;
  Trigger *pTrigger; /* List of triggers on this object */
  Schema *pSchema; /* Schema that contains this table */
- u8 aHx[16]; /* Column aHt[K%sizeof(aHt)] might have hash K */
};
/*
@@ -18965,13 +18867,9 @@ struct FKey {
  struct sColMap { /* Mapping of columns in pFrom to columns in zTo */
    int iFrom; /* Index of column in pFrom */
    char *zCol; /* Name of column in zTo. If NULL use PRIMARY KEY */
- } aCol[FLEXARRAY]; /* One entry for each of nCol columns */
+ } aCol[1]; /* One entry for each of nCol columns */
};
-/* The size (in bytes) of an FKey object holding N columns. The answer
-** does NOT include space to hold the zTo name. */
-#define SZ_FKEY(N) (offsetof(FKey,aCol)+(N)*sizeof(struct sColMap))
-
/*
** SQLite supports many different ways to resolve a constraint
** error. ROLLBACK processing means that a constraint violation
@@ -19033,12 +18931,9 @@ struct KeyInfo {
  u16 nAllField; /* Total columns, including key plus others */
  sqlite3 *db; /* The database connection */
  u8 *aSortFlags; /* Sort order for each column. */
- CollSeq *aColl[FLEXARRAY]; /* Collating sequence for each term of the key */
+ CollSeq *aColl[1]; /* Collating sequence for each term of the key */
};
-/* The size (in bytes) of a KeyInfo object with up to N fields */
-#define SZ_KEYINFO(N) (offsetof(KeyInfo,aColl) + (N)*sizeof(CollSeq*))
-
/*
** Allowed bit values for entries in the KeyInfo.aSortFlags[] array.
*/
@@ -19158,7 +19053,7 @@ struct Index {
  Pgno tnum; /* DB Page containing root of this index */
  LogEst szIdxRow; /* Estimated average row size in bytes */
  u16 nKeyCol; /* Number of columns forming the key */
- u16 nColumn; /* Nr columns in btree.
Can be 2*Table.nCol */ + u16 nColumn; /* Number of columns stored in the index */ u8 onError; /* OE_Abort, OE_Ignore, OE_Replace, or OE_None */ unsigned idxType:2; /* 0:Normal 1:UNIQUE, 2:PRIMARY KEY, 3:IPK */ unsigned bUnordered:1; /* Use this index for == or IN queries only */ @@ -19167,6 +19062,7 @@ struct Index { unsigned isCovering:1; /* True if this is a covering index */ unsigned noSkipScan:1; /* Do not try to use skip-scan if true */ unsigned hasStat1:1; /* aiRowLogEst values come from sqlite_stat1 */ + unsigned bLowQual:1; /* sqlite_stat1 says this is a low-quality index */ unsigned bNoQuery:1; /* Do not use this index to optimize queries */ unsigned bAscKeyBug:1; /* True if the bba7b69f9849b5bf bug applies */ unsigned bHasVCol:1; /* Index references one or more VIRTUAL columns */ @@ -19256,7 +19152,7 @@ struct AggInfo { ** from source tables rather than from accumulators */ u8 useSortingIdx; /* In direct mode, reference the sorting index rather ** than the source table */ - u32 nSortingColumn; /* Number of columns in the sorting index */ + u16 nSortingColumn; /* Number of columns in the sorting index */ int sortingIdx; /* Cursor number of the sorting index */ int sortingIdxPTab; /* Cursor number of pseudo-table */ int iFirstReg; /* First register in range for aCol[] and aFunc[] */ @@ -19265,8 +19161,8 @@ struct AggInfo { Table *pTab; /* Source table */ Expr *pCExpr; /* The original expression */ int iTable; /* Cursor number of the source table */ - int iColumn; /* Column number within the source table */ - int iSorterColumn; /* Column number in the sorting index */ + i16 iColumn; /* Column number within the source table */ + i16 iSorterColumn; /* Column number in the sorting index */ } *aCol; int nColumn; /* Number of used entries in aCol[] */ int nAccumulator; /* Number of columns that show through to the output. @@ -19495,10 +19391,10 @@ struct Expr { /* Macros can be used to test, set, or clear bits in the ** Expr.flags field. */ -#define ExprHasProperty(E,P) (((E)->flags&(u32)(P))!=0) -#define ExprHasAllProperty(E,P) (((E)->flags&(u32)(P))==(u32)(P)) -#define ExprSetProperty(E,P) (E)->flags|=(u32)(P) -#define ExprClearProperty(E,P) (E)->flags&=~(u32)(P) +#define ExprHasProperty(E,P) (((E)->flags&(P))!=0) +#define ExprHasAllProperty(E,P) (((E)->flags&(P))==(P)) +#define ExprSetProperty(E,P) (E)->flags|=(P) +#define ExprClearProperty(E,P) (E)->flags&=~(P) #define ExprAlwaysTrue(E) (((E)->flags&(EP_OuterON|EP_IsTrue))==EP_IsTrue) #define ExprAlwaysFalse(E) (((E)->flags&(EP_OuterON|EP_IsFalse))==EP_IsFalse) #define ExprIsFullSize(E) (((E)->flags&(EP_Reduced|EP_TokenOnly))==0) @@ -19610,14 +19506,9 @@ struct ExprList { int iConstExprReg; /* Register in which Expr value is cached. Used only ** by Parse.pConstExpr */ } u; - } a[FLEXARRAY]; /* One slot for each expression in the list */ + } a[1]; /* One slot for each expression in the list */ }; -/* The size (in bytes) of an ExprList object that is big enough to hold -** as many as N expressions. */ -#define SZ_EXPRLIST(N) \ - (offsetof(ExprList,a) + (N)*sizeof(struct ExprList_item)) - /* ** Allowed values for Expr.a.eEName */ @@ -19645,12 +19536,9 @@ struct IdList { int nId; /* Number of identifiers on the list */ struct IdList_item { char *zName; /* Name of the identifier */ - } a[FLEXARRAY]; + } a[1]; }; -/* The size (in bytes) of an IdList object that can hold up to N IDs. 
*/ -#define SZ_IDLIST(N) (offsetof(IdList,a)+(N)*sizeof(struct IdList_item)) - /* ** Allowed values for IdList.eType, which determines which value of the a.u4 ** is valid. @@ -19770,19 +19658,11 @@ struct OnOrUsing { ** */ struct SrcList { - int nSrc; /* Number of tables or subqueries in the FROM clause */ - u32 nAlloc; /* Number of entries allocated in a[] below */ - SrcItem a[FLEXARRAY]; /* One entry for each identifier on the list */ + int nSrc; /* Number of tables or subqueries in the FROM clause */ + u32 nAlloc; /* Number of entries allocated in a[] below */ + SrcItem a[1]; /* One entry for each identifier on the list */ }; -/* Size (in bytes) of a SrcList object that can hold as many as N -** SrcItem objects. */ -#define SZ_SRCLIST(N) (offsetof(SrcList,a)+(N)*sizeof(SrcItem)) - -/* Size (in bytes( of a SrcList object that holds 1 SrcItem. This is a -** special case of SZ_SRCITEM(1) that comes up often. */ -#define SZ_SRCLIST_1 (offsetof(SrcList,a)+sizeof(SrcItem)) - /* ** Permitted values of the SrcList.a.jointype field */ @@ -20251,32 +20131,25 @@ struct Parse { char *zErrMsg; /* An error message */ Vdbe *pVdbe; /* An engine for executing database bytecode */ int rc; /* Return code from execution */ - LogEst nQueryLoop; /* Est number of iterations of a query (10*log2(N)) */ + u8 colNamesSet; /* TRUE after OP_ColumnName has been issued to pVdbe */ + u8 checkSchema; /* Causes schema cookie check after an error */ u8 nested; /* Number of nested calls to the parser/code generator */ u8 nTempReg; /* Number of temporary registers in aTempReg[] */ u8 isMultiWrite; /* True if statement may modify/insert multiple rows */ u8 mayAbort; /* True if statement may throw an ABORT exception */ u8 hasCompound; /* Need to invoke convertCompoundSelectToSubquery() */ + u8 okConstFactor; /* OK to factor out constants */ u8 disableLookaside; /* Number of times lookaside has been disabled */ u8 prepFlags; /* SQLITE_PREPARE_* flags */ u8 withinRJSubrtn; /* Nesting level for RIGHT JOIN body subroutines */ + u8 bHasWith; /* True if statement contains WITH */ u8 mSubrtnSig; /* mini Bloom filter on available SubrtnSig.selId */ - u8 eTriggerOp; /* TK_UPDATE, TK_INSERT or TK_DELETE */ - u8 bReturning; /* Coding a RETURNING trigger */ - u8 eOrconf; /* Default ON CONFLICT policy for trigger steps */ - u8 disableTriggers; /* True to disable triggers */ #if defined(SQLITE_DEBUG) || defined(SQLITE_COVERAGE_TEST) u8 earlyCleanup; /* OOM inside sqlite3ParserAddCleanup() */ #endif #ifdef SQLITE_DEBUG u8 ifNotExists; /* Might be true if IF NOT EXISTS. Assert()s only */ - u8 isCreate; /* CREATE TABLE, INDEX, or VIEW (but not TRIGGER) - ** and ALTER TABLE ADD COLUMN. 
*/ #endif - bft colNamesSet :1; /* TRUE after OP_ColumnName has been issued to pVdbe */ - bft bHasWith :1; /* True if statement contains WITH */ - bft okConstFactor :1; /* OK to factor out constants */ - bft checkSchema :1; /* Causes schema cookie check after an error */ int nRangeReg; /* Size of the temporary register block */ int iRangeReg; /* First register in temporary register block */ int nErr; /* Number of errors seen */ @@ -20291,9 +20164,12 @@ struct Parse { ExprList *pConstExpr;/* Constant expressions */ IndexedExpr *pIdxEpr;/* List of expressions used by active indexes */ IndexedExpr *pIdxPartExpr; /* Exprs constrained by index WHERE clauses */ + Token constraintName;/* Name of the constraint currently being parsed */ yDbMask writeMask; /* Start a write transaction on these databases */ yDbMask cookieMask; /* Bitmask of schema verified databases */ - int nMaxArg; /* Max args to xUpdate and xFilter vtab methods */ + int regRowid; /* Register holding rowid of CREATE TABLE entry */ + int regRoot; /* Register holding root page number for new objects */ + int nMaxArg; /* Max args passed to user function by sub-program */ int nSelect; /* Number of SELECT stmts. Counter for Select.selId */ #ifndef SQLITE_OMIT_PROGRESS_CALLBACK u32 nProgressSteps; /* xProgress steps taken during sqlite3_prepare() */ @@ -20307,6 +20183,17 @@ struct Parse { Table *pTriggerTab; /* Table triggers are being coded for */ TriggerPrg *pTriggerPrg; /* Linked list of coded triggers */ ParseCleanup *pCleanup; /* List of cleanup operations to run after parse */ + union { + int addrCrTab; /* Address of OP_CreateBtree on CREATE TABLE */ + Returning *pReturning; /* The RETURNING clause */ + } u1; + u32 oldmask; /* Mask of old.* columns referenced */ + u32 newmask; /* Mask of new.* columns referenced */ + LogEst nQueryLoop; /* Est number of iterations of a query (10*log2(N)) */ + u8 eTriggerOp; /* TK_UPDATE, TK_INSERT or TK_DELETE */ + u8 bReturning; /* Coding a RETURNING trigger */ + u8 eOrconf; /* Default ON CONFLICT policy for trigger steps */ + u8 disableTriggers; /* True to disable triggers */ /************************************************************************** ** Fields above must be initialized to zero. The fields that follow, @@ -20318,19 +20205,6 @@ struct Parse { int aTempReg[8]; /* Holding area for temporary registers */ Parse *pOuterParse; /* Outer Parse object when nested */ Token sNameToken; /* Token with unqualified schema object name */ - u32 oldmask; /* Mask of old.* columns referenced */ - u32 newmask; /* Mask of new.* columns referenced */ - union { - struct { /* These fields available when isCreate is true */ - int addrCrTab; /* Address of OP_CreateBtree on CREATE TABLE */ - int regRowid; /* Register holding rowid of CREATE TABLE entry */ - int regRoot; /* Register holding root page for new objects */ - Token constraintName; /* Name of the constraint currently being parsed */ - } cr; - struct { /* These fields available to all other statements */ - Returning *pReturning; /* The RETURNING clause */ - } d; - } u1; /************************************************************************ ** Above is constant between recursions. Below is reset before and after @@ -20846,13 +20720,9 @@ struct With { int nCte; /* Number of CTEs in the WITH clause */ int bView; /* Belongs to the outermost Select of a view */ With *pOuter; /* Containing WITH clause, or NULL */ - Cte a[FLEXARRAY]; /* For each CTE in the WITH clause.... */ + Cte a[1]; /* For each CTE in the WITH clause.... 
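[Illustrative aside.] The Parse hunk above trades single-bit `bft` fields (`colNamesSet`, `bHasWith`, `okConstFactor`, `checkSchema`) for whole `u8` members. A self-contained sketch of the trade-off; the struct names are invented and exact sizes are compiler-dependent:

```c
#include <stdio.h>

typedef unsigned bft;                   /* Bit Field Type, as in the diff */

struct FlagsPacked {                    /* four flags share one unsigned */
  bft colNamesSet   :1;
  bft bHasWith      :1;
  bft okConstFactor :1;
  bft checkSchema   :1;
};

struct FlagsBytes {                     /* one byte per flag */
  unsigned char colNamesSet, bHasWith, okConstFactor, checkSchema;
};

int main(void){
  /* Both may report 4 bytes here, but the packed form stays one word as
  ** flags accumulate, which matters for hot structures like Parse. */
  printf("packed=%zu bytes, bytes=%zu bytes\n",
         sizeof(struct FlagsPacked), sizeof(struct FlagsBytes));
  return 0;
}
```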
*/ }; -/* The size (in bytes) of a With object that can hold as many -** as N different CTEs. */ -#define SZ_WITH(N) (offsetof(With,a) + (N)*sizeof(Cte)) - /* ** The Cte object is not guaranteed to persist for the entire duration ** of code generation. (The query flattener or other parser tree @@ -20881,13 +20751,9 @@ struct DbClientData { DbClientData *pNext; /* Next in a linked list */ void *pData; /* The data */ void (*xDestructor)(void*); /* Destructor. Might be NULL */ - char zName[FLEXARRAY]; /* Name of this client data. MUST BE LAST */ + char zName[1]; /* Name of this client data. MUST BE LAST */ }; -/* The size (in bytes) of a DbClientData object that can has a name -** that is N bytes long, including the zero-terminator. */ -#define SZ_DBCLIENTDATA(N) (offsetof(DbClientData,zName)+(N)) - #ifdef SQLITE_DEBUG /* ** An instance of the TreeView object is used for printing the content of @@ -21330,7 +21196,7 @@ SQLITE_PRIVATE void sqlite3SubqueryColumnTypes(Parse*,Table*,Select*,char); SQLITE_PRIVATE Table *sqlite3ResultSetOfSelect(Parse*,Select*,char); SQLITE_PRIVATE void sqlite3OpenSchemaTable(Parse *, int); SQLITE_PRIVATE Index *sqlite3PrimaryKeyIndex(Table*); -SQLITE_PRIVATE int sqlite3TableColumnToIndex(Index*, int); +SQLITE_PRIVATE i16 sqlite3TableColumnToIndex(Index*, i16); #ifdef SQLITE_OMIT_GENERATED_COLUMNS # define sqlite3TableColumnToStorage(T,X) (X) /* No-op pass-through */ # define sqlite3StorageColumnToTable(T,X) (X) /* No-op pass-through */ @@ -21428,7 +21294,7 @@ SQLITE_PRIVATE void sqlite3SrcListAssignCursors(Parse*, SrcList*); SQLITE_PRIVATE void sqlite3IdListDelete(sqlite3*, IdList*); SQLITE_PRIVATE void sqlite3ClearOnOrUsing(sqlite3*, OnOrUsing*); SQLITE_PRIVATE void sqlite3SrcListDelete(sqlite3*, SrcList*); -SQLITE_PRIVATE Index *sqlite3AllocateIndexObject(sqlite3*,int,int,char**); +SQLITE_PRIVATE Index *sqlite3AllocateIndexObject(sqlite3*,i16,int,char**); SQLITE_PRIVATE void sqlite3CreateIndex(Parse*,Token*,Token*,SrcList*,ExprList*,int,Token*, Expr*, int, int, u8); SQLITE_PRIVATE void sqlite3DropIndex(Parse*, SrcList*, int); @@ -21564,8 +21430,7 @@ SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3*,const Select*,int); SQLITE_PRIVATE FuncDef *sqlite3FunctionSearch(int,const char*); SQLITE_PRIVATE void sqlite3InsertBuiltinFuncs(FuncDef*,int); SQLITE_PRIVATE FuncDef *sqlite3FindFunction(sqlite3*,const char*,int,u8,u8); -SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum*,sqlite3_value*,int); -SQLITE_PRIVATE int sqlite3AppendOneUtf8Character(char*, u32); +SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum*,sqlite3_value*); SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void); SQLITE_PRIVATE void sqlite3RegisterDateTimeFunctions(void); SQLITE_PRIVATE void sqlite3RegisterJsonFunctions(void); @@ -22430,9 +22295,6 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_BUG_COMPATIBLE_20160819 "BUG_COMPATIBLE_20160819", #endif -#ifdef SQLITE_BUG_COMPATIBLE_20250510 - "BUG_COMPATIBLE_20250510", -#endif #ifdef SQLITE_CASE_SENSITIVE_LIKE "CASE_SENSITIVE_LIKE", #endif @@ -22669,9 +22531,6 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_ENABLE_SESSION "ENABLE_SESSION", #endif -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - "ENABLE_SETLK_TIMEOUT", -#endif #ifdef SQLITE_ENABLE_SNAPSHOT "ENABLE_SNAPSHOT", #endif @@ -22726,9 +22585,6 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_EXTRA_INIT "EXTRA_INIT=" CTIMEOPT_VAL(SQLITE_EXTRA_INIT), #endif -#ifdef SQLITE_EXTRA_INIT_MUTEXED - "EXTRA_INIT_MUTEXED=" 
CTIMEOPT_VAL(SQLITE_EXTRA_INIT_MUTEXED), -#endif #ifdef SQLITE_EXTRA_SHUTDOWN "EXTRA_SHUTDOWN=" CTIMEOPT_VAL(SQLITE_EXTRA_SHUTDOWN), #endif @@ -23713,19 +23569,12 @@ struct VdbeCursor { #endif VdbeTxtBlbCache *pCache; /* Cache of large TEXT or BLOB values */ - /* Space is allocated for aType to hold at least 2*nField+1 entries: - ** nField slots for aType[] and nField+1 array slots for aOffset[] */ - u32 aType[FLEXARRAY]; /* Type values record decode. MUST BE LAST */ + /* 2*nField extra array elements allocated for aType[], beyond the one + ** static element declared in the structure. nField total array slots for + ** aType[] and nField+1 array slots for aOffset[] */ + u32 aType[1]; /* Type values record decode. MUST BE LAST */ }; -/* -** The size (in bytes) of a VdbeCursor object that has an nField value of N -** or less. The value of SZ_VDBECURSOR(n) is guaranteed to be a multiple -** of 8. -*/ -#define SZ_VDBECURSOR(N) \ - (ROUND8(offsetof(VdbeCursor,aType)) + ((N)+1)*sizeof(u64)) - /* Return true if P is a null-only cursor */ #define IsNullCursor(P) \ @@ -23982,16 +23831,13 @@ struct sqlite3_context { u8 enc; /* Encoding to use for results */ u8 skipFlag; /* Skip accumulator loading if true */ u16 argc; /* Number of arguments */ - sqlite3_value *argv[FLEXARRAY]; /* Argument set */ + sqlite3_value *argv[1]; /* Argument set */ }; -/* -** The size (in bytes) of an sqlite3_context object that holds N -** argv[] arguments. +/* A bitfield type for use inside of structures. Always follow with :N where +** N is the number of bits. */ -#define SZ_CONTEXT(N) \ - (offsetof(sqlite3_context,argv)+(N)*sizeof(sqlite3_value*)) - +typedef unsigned bft; /* Bit Field Type */ /* The ScanStatus object holds a single value for the ** sqlite3_stmt_scanstatus() interface. @@ -24052,7 +23898,7 @@ struct Vdbe { i64 nStmtDefCons; /* Number of def. constraints when stmt started */ i64 nStmtDefImmCons; /* Number of def. imm constraints when stmt started */ Mem *aMem; /* The memory locations */ - Mem **apArg; /* Arguments xUpdate and xFilter vtab methods */ + Mem **apArg; /* Arguments to currently executing user function */ VdbeCursor **apCsr; /* One element of this array for each open cursor */ Mem *aVar; /* Values for the OP_Variable opcode. 
*/ @@ -24072,7 +23918,6 @@ struct Vdbe { #ifdef SQLITE_DEBUG int rcApp; /* errcode set by sqlite3_result_error_code() */ u32 nWrite; /* Number of write operations that have occurred */ - int napArg; /* Size of the apArg[] array */ #endif u16 nResColumn; /* Number of columns in one row of the result set */ u16 nResAlloc; /* Column slots allocated to aColName[] */ @@ -24125,7 +23970,7 @@ struct PreUpdate { VdbeCursor *pCsr; /* Cursor to read old values from */ int op; /* One of SQLITE_INSERT, UPDATE, DELETE */ u8 *aRecord; /* old.* database record */ - KeyInfo *pKeyinfo; /* Key information */ + KeyInfo keyinfo; UnpackedRecord *pUnpacked; /* Unpacked version of aRecord[] */ UnpackedRecord *pNewUnpacked; /* Unpacked version of new.* record */ int iNewReg; /* Register for new.* values */ @@ -24137,7 +23982,6 @@ struct PreUpdate { Table *pTab; /* Schema object being updated */ Index *pPk; /* PK index if pTab is WITHOUT ROWID */ sqlite3_value **apDflt; /* Array of default values, if required */ - u8 keyinfoSpace[SZ_KEYINFO(0)]; /* Space to hold pKeyinfo[0] content */ }; /* @@ -24504,9 +24348,8 @@ SQLITE_PRIVATE int sqlite3LookasideUsed(sqlite3 *db, int *pHighwater){ nInit += countLookasideSlots(db->lookaside.pSmallInit); nFree += countLookasideSlots(db->lookaside.pSmallFree); #endif /* SQLITE_OMIT_TWOSIZE_LOOKASIDE */ - assert( db->lookaside.nSlot >= nInit+nFree ); - if( pHighwater ) *pHighwater = (int)(db->lookaside.nSlot - nInit); - return (int)(db->lookaside.nSlot - (nInit+nFree)); + if( pHighwater ) *pHighwater = db->lookaside.nSlot - nInit; + return db->lookaside.nSlot - (nInit+nFree); } /* @@ -24559,7 +24402,7 @@ SQLITE_API int sqlite3_db_status( assert( (op-SQLITE_DBSTATUS_LOOKASIDE_HIT)>=0 ); assert( (op-SQLITE_DBSTATUS_LOOKASIDE_HIT)<3 ); *pCurrent = 0; - *pHighwater = (int)db->lookaside.anStat[op-SQLITE_DBSTATUS_LOOKASIDE_HIT]; + *pHighwater = db->lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT]; if( resetFlag ){ db->lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT] = 0; } @@ -26071,7 +25914,7 @@ static int daysAfterMonday(DateTime *pDate){ ** In other words, return the day of the week according ** to this code: ** -** 0=Sunday, 1=Monday, 2=Tuesday, ..., 6=Saturday +** 0=Sunday, 1=Monday, 2=Tues, ..., 6=Saturday */ static int daysAfterSunday(DateTime *pDate){ assert( pDate->validJD ); @@ -30280,8 +30123,6 @@ SQLITE_PRIVATE sqlite3_mutex_methods const *sqlite3DefaultMutex(void){ #ifdef __CYGWIN__ # include <sys/cygwin.h> -# include <sys/stat.h> /* amalgamator: dontcache */ -# include <unistd.h> /* amalgamator: dontcache */ # include <errno.h> /* amalgamator: dontcache */ #endif @@ -31676,17 +31517,17 @@ SQLITE_PRIVATE int sqlite3ApiExit(sqlite3* db, int rc){ #define etPERCENT 7 /* Percent symbol. %% */ #define etCHARX 8 /* Characters. %c */ /* The rest are extensions, not normally found in printf() */ -#define etESCAPE_q 9 /* Strings with '\'' doubled. %q */ -#define etESCAPE_Q 10 /* Strings with '\'' doubled and enclosed in '', - NULL pointers replaced by SQL NULL. %Q */ -#define etTOKEN 11 /* a pointer to a Token structure */ -#define etSRCITEM 12 /* a pointer to a SrcItem */ -#define etPOINTER 13 /* The %p conversion */ -#define etESCAPE_w 14 /* %w -> Strings with '\"' doubled */ -#define etORDINAL 15 /* %r -> 1st, 2nd, 3rd, 4th, etc. English only */ -#define etDECIMAL 16 /* %d or %u, but not %x, %o */ +#define etSQLESCAPE 9 /* Strings with '\'' doubled. %q */ +#define etSQLESCAPE2 10 /* Strings with '\'' doubled and enclosed in '', + NULL pointers replaced by SQL NULL. 
%Q */ +#define etTOKEN 11 /* a pointer to a Token structure */ +#define etSRCITEM 12 /* a pointer to a SrcItem */ +#define etPOINTER 13 /* The %p conversion */ +#define etSQLESCAPE3 14 /* %w -> Strings with '\"' doubled */ +#define etORDINAL 15 /* %r -> 1st, 2nd, 3rd, 4th, etc. English only */ +#define etDECIMAL 16 /* %d or %u, but not %x, %o */ -#define etINVALID 17 /* Any unrecognized conversion type */ +#define etINVALID 17 /* Any unrecognized conversion type */ /* @@ -31725,9 +31566,9 @@ static const et_info fmtinfo[] = { { 's', 0, 4, etSTRING, 0, 0 }, { 'g', 0, 1, etGENERIC, 30, 0 }, { 'z', 0, 4, etDYNSTRING, 0, 0 }, - { 'q', 0, 4, etESCAPE_q, 0, 0 }, - { 'Q', 0, 4, etESCAPE_Q, 0, 0 }, - { 'w', 0, 4, etESCAPE_w, 0, 0 }, + { 'q', 0, 4, etSQLESCAPE, 0, 0 }, + { 'Q', 0, 4, etSQLESCAPE2, 0, 0 }, + { 'w', 0, 4, etSQLESCAPE3, 0, 0 }, { 'c', 0, 0, etCHARX, 0, 0 }, { 'o', 8, 0, etRADIX, 0, 2 }, { 'u', 10, 0, etDECIMAL, 0, 0 }, @@ -32324,7 +32165,25 @@ SQLITE_API void sqlite3_str_vappendf( } }else{ unsigned int ch = va_arg(ap,unsigned int); - length = sqlite3AppendOneUtf8Character(buf, ch); + if( ch<0x00080 ){ + buf[0] = ch & 0xff; + length = 1; + }else if( ch<0x00800 ){ + buf[0] = 0xc0 + (u8)((ch>>6)&0x1f); + buf[1] = 0x80 + (u8)(ch & 0x3f); + length = 2; + }else if( ch<0x10000 ){ + buf[0] = 0xe0 + (u8)((ch>>12)&0x0f); + buf[1] = 0x80 + (u8)((ch>>6) & 0x3f); + buf[2] = 0x80 + (u8)(ch & 0x3f); + length = 3; + }else{ + buf[0] = 0xf0 + (u8)((ch>>18) & 0x07); + buf[1] = 0x80 + (u8)((ch>>12) & 0x3f); + buf[2] = 0x80 + (u8)((ch>>6) & 0x3f); + buf[3] = 0x80 + (u8)(ch & 0x3f); + length = 4; + } } if( precision>1 ){ i64 nPrior = 1; @@ -32404,31 +32263,22 @@ SQLITE_API void sqlite3_str_vappendf( while( ii>=0 ) if( (bufpt[ii--] & 0xc0)==0x80 ) width++; } break; - case etESCAPE_q: /* %q: Escape ' characters */ - case etESCAPE_Q: /* %Q: Escape ' and enclose in '...' */ - case etESCAPE_w: { /* %w: Escape " characters */ + case etSQLESCAPE: /* %q: Escape ' characters */ + case etSQLESCAPE2: /* %Q: Escape ' and enclose in '...' */ + case etSQLESCAPE3: { /* %w: Escape " characters */ i64 i, j, k, n; - int needQuote = 0; + int needQuote, isnull; char ch; + char q = ((xtype==etSQLESCAPE3)?'"':'\''); /* Quote character */ char *escarg; - char q; if( bArgList ){ escarg = getTextArg(pArgList); }else{ escarg = va_arg(ap,char*); } - if( escarg==0 ){ - escarg = (xtype==etESCAPE_Q ? "NULL" : "(NULL)"); - }else if( xtype==etESCAPE_Q ){ - needQuote = 1; - } - if( xtype==etESCAPE_w ){ - q = '"'; - flag_alternateform = 0; - }else{ - q = '\''; - } + isnull = escarg==0; + if( isnull ) escarg = (xtype==etSQLESCAPE2 ? "NULL" : "(NULL)"); /* For %q, %Q, and %w, the precision is the number of bytes (or ** characters if the ! flags is present) to use from the input. ** Because of the extra quoting characters inserted, the number @@ -32441,30 +32291,7 @@ SQLITE_API void sqlite3_str_vappendf( while( (escarg[i+1]&0xc0)==0x80 ){ i++; } } } - if( flag_alternateform ){ - /* For %#q, do unistr()-style backslash escapes for - ** all control characters, and for backslash itself. - ** For %#Q, do the same but only if there is at least - ** one control character. 
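[Illustrative aside.] The `etESCAPE_q`/`etSQLESCAPE` codes renamed above implement the `%q`, `%Q`, and `%w` extensions documented for `sqlite3_mprintf()`. A small usage sketch of the observable behavior (expected output in comments):

```c
#include <stdio.h>
#include "sqlite3.h"

int main(void){
  char *a = sqlite3_mprintf("SELECT '%q'", "it's");        /* SELECT 'it''s' */
  char *b = sqlite3_mprintf("SELECT %Q", (char*)0);        /* SELECT NULL    */
  char *c = sqlite3_mprintf("DROP TABLE \"%w\"", "a\"b");  /* DROP TABLE "a""b" */
  printf("%s\n%s\n%s\n", a, b, c);
  sqlite3_free(a);
  sqlite3_free(b);
  sqlite3_free(c);
  return 0;
}
```

`%q` doubles embedded single quotes, `%Q` additionally supplies the outer quotes and renders a NULL pointer as the SQL keyword NULL, and `%w` doubles double quotes for quoted identifiers.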
*/ - u32 nBack = 0; - u32 nCtrl = 0; - for(k=0; ketBUFSIZE ){ bufpt = zExtra = printfTempBuf(pAccum, n); @@ -32473,41 +32300,13 @@ SQLITE_API void sqlite3_str_vappendf( bufpt = buf; } j = 0; - if( needQuote ){ - if( needQuote==2 ){ - memcpy(&bufpt[j], "unistr('", 8); - j += 8; - }else{ - bufpt[j++] = '\''; - } - } + if( needQuote ) bufpt[j++] = q; k = i; - if( flag_alternateform ){ - for(i=0; i=0x10 ? '1' : '0'; - bufpt[j++] = "0123456789abcdef"[ch&0xf]; - } - } - }else{ - for(i=0; imxAlloc>0 && !isMalloced(p) ); - zText = sqlite3DbMallocRaw(p->db, 1+(u64)p->nChar ); + zText = sqlite3DbMallocRaw(p->db, p->nChar+1 ); if( zText ){ memcpy(zText, p->zText, p->nChar+1); p->printfFlags |= SQLITE_PRINTF_MALLOCED; @@ -32995,15 +32794,6 @@ SQLITE_API char *sqlite3_snprintf(int n, char *zBuf, const char *zFormat, ...){ return zBuf; } -/* Maximum size of an sqlite3_log() message. */ -#if defined(SQLITE_MAX_LOG_MESSAGE) - /* Leave the definition as supplied */ -#elif SQLITE_PRINT_BUF_SIZE*10>10000 -# define SQLITE_MAX_LOG_MESSAGE 10000 -#else -# define SQLITE_MAX_LOG_MESSAGE (SQLITE_PRINT_BUF_SIZE*10) -#endif - /* ** This is the routine that actually formats the sqlite3_log() message. ** We house it in a separate routine from sqlite3_log() to avoid using @@ -33020,7 +32810,7 @@ SQLITE_API char *sqlite3_snprintf(int n, char *zBuf, const char *zFormat, ...){ */ static void renderLogMsg(int iErrCode, const char *zFormat, va_list ap){ StrAccum acc; /* String accumulator */ - char zMsg[SQLITE_MAX_LOG_MESSAGE]; /* Complete log message */ + char zMsg[SQLITE_PRINT_BUF_SIZE*3]; /* Complete log message */ sqlite3StrAccumInit(&acc, 0, zMsg, sizeof(zMsg), 0); sqlite3_str_vappendf(&acc, zFormat, ap); @@ -35015,35 +34805,6 @@ static const unsigned char sqlite3Utf8Trans1[] = { } \ } -/* -** Write a single UTF8 character whose value is v into the -** buffer starting at zOut. zOut must be sized to hold at -** least four bytes. Return the number of bytes needed -** to encode the new character. -*/ -SQLITE_PRIVATE int sqlite3AppendOneUtf8Character(char *zOut, u32 v){ - if( v<0x00080 ){ - zOut[0] = (u8)(v & 0xff); - return 1; - } - if( v<0x00800 ){ - zOut[0] = 0xc0 + (u8)((v>>6) & 0x1f); - zOut[1] = 0x80 + (u8)(v & 0x3f); - return 2; - } - if( v<0x10000 ){ - zOut[0] = 0xe0 + (u8)((v>>12) & 0x0f); - zOut[1] = 0x80 + (u8)((v>>6) & 0x3f); - zOut[2] = 0x80 + (u8)(v & 0x3f); - return 3; - } - zOut[0] = 0xf0 + (u8)((v>>18) & 0x07); - zOut[1] = 0x80 + (u8)((v>>12) & 0x3f); - zOut[2] = 0x80 + (u8)((v>>6) & 0x3f); - zOut[3] = 0x80 + (u8)(v & 0x3f); - return 4; -} - /* ** Translate a single UTF-8 character. Return the unicode value. ** @@ -35465,7 +35226,7 @@ SQLITE_PRIVATE int sqlite3Utf16ByteLen(const void *zIn, int nByte, int nChar){ int n = 0; if( SQLITE_UTF16NATIVE==SQLITE_UTF16LE ) z++; - while( n=0xd8 && c<0xdc && z<=zEnd && z[0]>=0xdc && z[0]<0xe0 ) z += 2; @@ -36640,11 +36401,7 @@ SQLITE_PRIVATE void sqlite3FpDecode(FpDecode *p, double r, int iRound, int mxRou } p->z = &p->zBuf[i+1]; assert( i+p->n < sizeof(p->zBuf) ); - assert( p->n>0 ); - while( p->z[p->n-1]=='0' ){ - p->n--; - assert( p->n>0 ); - } + while( ALWAYS(p->n>0) && p->z[p->n-1]=='0' ){ p->n--; } } /* @@ -37149,7 +36906,7 @@ SQLITE_PRIVATE int sqlite3MulInt64(i64 *pA, i64 iB){ } /* -** Compute the absolute value of a 32-bit signed integer, if possible. Or +** Compute the absolute value of a 32-bit signed integer, of possible. 
Or ** if the integer has a value of -2147483648, return +2147483647 */ SQLITE_PRIVATE int sqlite3AbsInt32(int x){ @@ -37430,19 +37187,12 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash *pH){ */ static unsigned int strHash(const char *z){ unsigned int h = 0; - while( z[0] ){ /*OPTIMIZATION-IF-TRUE*/ + unsigned char c; + while( (c = (unsigned char)*z++)!=0 ){ /*OPTIMIZATION-IF-TRUE*/ /* Knuth multiplicative hashing. (Sorting & Searching, p. 510). ** 0x9e3779b1 is 2654435761 which is the closest prime number to - ** (2**32)*golden_ratio, where golden_ratio = (sqrt(5) - 1)/2. - ** - ** Only bits 0xdf for ASCII and bits 0xbf for EBCDIC each octet are - ** hashed since the omitted bits determine the upper/lower case difference. - */ -#ifdef SQLITE_EBCDIC - h += 0xbf & (unsigned char)*(z++); -#else - h += 0xdf & (unsigned char)*(z++); -#endif + ** (2**32)*golden_ratio, where golden_ratio = (sqrt(5) - 1)/2. */ + h += sqlite3UpperToLower[c]; h *= 0x9e3779b1; } return h; @@ -37515,8 +37265,9 @@ static int rehash(Hash *pH, unsigned int new_size){ pH->htsize = new_size = sqlite3MallocSize(new_ht)/sizeof(struct _ht); memset(new_ht, 0, new_size*sizeof(struct _ht)); for(elem=pH->first, pH->first=0; elem; elem = next_elem){ + unsigned int h = strHash(elem->pKey) % new_size; next_elem = elem->next; - insertElement(pH, &new_ht[elem->h % new_size], elem); + insertElement(pH, &new_ht[h], elem); } return 1; } @@ -37534,22 +37285,23 @@ static HashElem *findElementWithHash( HashElem *elem; /* Used to loop thru the element list */ unsigned int count; /* Number of elements left to test */ unsigned int h; /* The computed hash */ - static HashElem nullElement = { 0, 0, 0, 0, 0 }; + static HashElem nullElement = { 0, 0, 0, 0 }; - h = strHash(pKey); if( pH->ht ){ /*OPTIMIZATION-IF-TRUE*/ struct _ht *pEntry; - pEntry = &pH->ht[h % pH->htsize]; + h = strHash(pKey) % pH->htsize; + pEntry = &pH->ht[h]; elem = pEntry->chain; count = pEntry->count; }else{ + h = 0; elem = pH->first; count = pH->count; } if( pHash ) *pHash = h; while( count ){ assert( elem!=0 ); - if( h==elem->h && sqlite3StrICmp(elem->pKey,pKey)==0 ){ + if( sqlite3StrICmp(elem->pKey,pKey)==0 ){ return elem; } elem = elem->next; @@ -37561,9 +37313,10 @@ static HashElem *findElementWithHash( /* Remove a single entry from the hash table given a pointer to that ** element and a hash on the element's key. 
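[Illustrative aside.] `strHash()` above is Knuth multiplicative hashing: fold each byte for case-insensitivity, add it, then multiply by 0x9e3779b1, the prime nearest to 2^32 times the golden ratio. A standalone sketch of the same scheme, using `tolower()` in place of SQLite's `sqlite3UpperToLower[]` table:

```c
#include <ctype.h>

static unsigned int strHashSketch(const char *z){
  unsigned int h = 0;
  unsigned char c;
  while( (c = (unsigned char)*z++)!=0 ){
    h += (unsigned int)tolower(c);   /* case-fold so "ABC" and "abc" collide */
    h *= 0x9e3779b1;                 /* Knuth multiplicative step */
  }
  return h;
}
```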
*/ -static void removeElement( +static void removeElementGivenHash( Hash *pH, /* The pH containing "elem" */ - HashElem *elem /* The element to be removed from the pH */ + HashElem* elem, /* The element to be removed from the pH */ + unsigned int h /* Hash value for the element */ ){ struct _ht *pEntry; if( elem->prev ){ @@ -37575,7 +37328,7 @@ static void removeElement( elem->next->prev = elem->prev; } if( pH->ht ){ - pEntry = &pH->ht[elem->h % pH->htsize]; + pEntry = &pH->ht[h]; if( pEntry->chain==elem ){ pEntry->chain = elem->next; } @@ -37626,7 +37379,7 @@ SQLITE_PRIVATE void *sqlite3HashInsert(Hash *pH, const char *pKey, void *data){ if( elem->data ){ void *old_data = elem->data; if( data==0 ){ - removeElement(pH,elem); + removeElementGivenHash(pH,elem,h); }else{ elem->data = data; elem->pKey = pKey; @@ -37637,13 +37390,15 @@ SQLITE_PRIVATE void *sqlite3HashInsert(Hash *pH, const char *pKey, void *data){ new_elem = (HashElem*)sqlite3Malloc( sizeof(HashElem) ); if( new_elem==0 ) return data; new_elem->pKey = pKey; - new_elem->h = h; new_elem->data = data; pH->count++; - if( pH->count>=5 && pH->count > 2*pH->htsize ){ - rehash(pH, pH->count*3); + if( pH->count>=10 && pH->count > 2*pH->htsize ){ + if( rehash(pH, pH->count*2) ){ + assert( pH->htsize>0 ); + h = strHash(pKey) % pH->htsize; + } } - insertElement(pH, pH->ht ? &pH->ht[new_elem->h % pH->htsize] : 0, new_elem); + insertElement(pH, pH->ht ? &pH->ht[h] : 0, new_elem); return 0; } @@ -39126,7 +38881,6 @@ struct unixFile { #endif #ifdef SQLITE_ENABLE_SETLK_TIMEOUT unsigned iBusyTimeout; /* Wait this many millisec on locks */ - int bBlockOnConnect; /* True to block for SHARED locks */ #endif #if OS_VXWORKS struct vxworksFileId *pId; /* Unique file ID */ @@ -40520,13 +40274,6 @@ static int unixFileLock(unixFile *pFile, struct flock *pLock){ rc = 0; } }else{ -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - if( pFile->bBlockOnConnect && pLock->l_type==F_RDLCK - && pLock->l_start==SHARED_FIRST && pLock->l_len==SHARED_SIZE - ){ - rc = osFcntl(pFile->h, F_SETLKW, pLock); - }else -#endif rc = osSetPosixAdvisoryLock(pFile->h, pLock, pFile); } return rc; @@ -42888,9 +42635,8 @@ static int unixFileControl(sqlite3_file *id, int op, void *pArg){ #ifdef SQLITE_ENABLE_SETLK_TIMEOUT case SQLITE_FCNTL_LOCK_TIMEOUT: { int iOld = pFile->iBusyTimeout; - int iNew = *(int*)pArg; #if SQLITE_ENABLE_SETLK_TIMEOUT==1 - pFile->iBusyTimeout = iNew<0 ? 0x7FFFFFFF : (unsigned)iNew; + pFile->iBusyTimeout = *(int*)pArg; #elif SQLITE_ENABLE_SETLK_TIMEOUT==2 pFile->iBusyTimeout = !!(*(int*)pArg); #else @@ -42899,12 +42645,7 @@ static int unixFileControl(sqlite3_file *id, int op, void *pArg){ *(int*)pArg = iOld; return SQLITE_OK; } - case SQLITE_FCNTL_BLOCK_ON_CONNECT: { - int iNew = *(int*)pArg; - pFile->bBlockOnConnect = iNew; - return SQLITE_OK; - } -#endif /* SQLITE_ENABLE_SETLK_TIMEOUT */ +#endif #if SQLITE_MAX_MMAP_SIZE>0 case SQLITE_FCNTL_MMAP_SIZE: { i64 newLimit = *(i64*)pArg; @@ -43877,20 +43618,21 @@ static int unixShmLock( /* Check that, if this to be a blocking lock, no locks that occur later ** in the following list than the lock being obtained are already held: ** - ** 1. Recovery lock (ofst==2). - ** 2. Checkpointer lock (ofst==1). - ** 3. Write lock (ofst==0). - ** 4. 
Read locks (ofst>=3 && ofst=3 && ofstexclMask|p->sharedMask); assert( (flags & SQLITE_SHM_UNLOCK) || pDbFd->iBusyTimeout==0 || ( - (ofst!=2 || lockMask==0) + (ofst!=2) /* not RECOVER */ && (ofst!=1 || lockMask==0 || lockMask==2) && (ofst!=0 || lockMask<3) && (ofst<3 || lockMask<(1<iBusyTimeout -#else -# define winFileBusyTimeout(pDbFd) 0 -#endif - /* ** The winVfsAppData structure is used for the pAppData member for all of the ** Win32 VFS variants. @@ -47746,7 +47478,7 @@ static struct win_syscall { { "FileTimeToLocalFileTime", (SYSCALL)0, 0 }, #endif -#define osFileTimeToLocalFileTime ((BOOL(WINAPI*)(const FILETIME*, \ +#define osFileTimeToLocalFileTime ((BOOL(WINAPI*)(CONST FILETIME*, \ LPFILETIME))aSyscall[11].pCurrent) #if SQLITE_OS_WINCE @@ -47755,7 +47487,7 @@ static struct win_syscall { { "FileTimeToSystemTime", (SYSCALL)0, 0 }, #endif -#define osFileTimeToSystemTime ((BOOL(WINAPI*)(const FILETIME*, \ +#define osFileTimeToSystemTime ((BOOL(WINAPI*)(CONST FILETIME*, \ LPSYSTEMTIME))aSyscall[12].pCurrent) { "FlushFileBuffers", (SYSCALL)FlushFileBuffers, 0 }, @@ -47861,12 +47593,6 @@ static struct win_syscall { #define osGetFullPathNameW ((DWORD(WINAPI*)(LPCWSTR,DWORD,LPWSTR, \ LPWSTR*))aSyscall[25].pCurrent) -/* -** For GetLastError(), MSDN says: -** -** Minimum supported client: Windows XP [desktop apps | UWP apps] -** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] -*/ { "GetLastError", (SYSCALL)GetLastError, 0 }, #define osGetLastError ((DWORD(WINAPI*)(VOID))aSyscall[26].pCurrent) @@ -48035,7 +47761,7 @@ static struct win_syscall { { "LockFile", (SYSCALL)0, 0 }, #endif -#if !defined(osLockFile) && defined(SQLITE_WIN32_HAS_ANSI) +#ifndef osLockFile #define osLockFile ((BOOL(WINAPI*)(HANDLE,DWORD,DWORD,DWORD, \ DWORD))aSyscall[47].pCurrent) #endif @@ -48099,7 +47825,7 @@ static struct win_syscall { { "SystemTimeToFileTime", (SYSCALL)SystemTimeToFileTime, 0 }, -#define osSystemTimeToFileTime ((BOOL(WINAPI*)(const SYSTEMTIME*, \ +#define osSystemTimeToFileTime ((BOOL(WINAPI*)(CONST SYSTEMTIME*, \ LPFILETIME))aSyscall[56].pCurrent) #if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT @@ -48108,7 +47834,7 @@ static struct win_syscall { { "UnlockFile", (SYSCALL)0, 0 }, #endif -#if !defined(osUnlockFile) && defined(SQLITE_WIN32_HAS_ANSI) +#ifndef osUnlockFile #define osUnlockFile ((BOOL(WINAPI*)(HANDLE,DWORD,DWORD,DWORD, \ DWORD))aSyscall[57].pCurrent) #endif @@ -48149,13 +47875,11 @@ static struct win_syscall { #define osCreateEventExW ((HANDLE(WINAPI*)(LPSECURITY_ATTRIBUTES,LPCWSTR, \ DWORD,DWORD))aSyscall[62].pCurrent) -/* -** For WaitForSingleObject(), MSDN says: -** -** Minimum supported client: Windows XP [desktop apps | UWP apps] -** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] -*/ +#if !SQLITE_OS_WINRT { "WaitForSingleObject", (SYSCALL)WaitForSingleObject, 0 }, +#else + { "WaitForSingleObject", (SYSCALL)0, 0 }, +#endif #define osWaitForSingleObject ((DWORD(WINAPI*)(HANDLE, \ DWORD))aSyscall[63].pCurrent) @@ -48302,97 +48026,6 @@ static struct win_syscall { #define osFlushViewOfFile \ ((BOOL(WINAPI*)(LPCVOID,SIZE_T))aSyscall[79].pCurrent) -/* -** If SQLITE_ENABLE_SETLK_TIMEOUT is defined, we require CreateEvent() -** to implement blocking locks with timeouts. 
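[Illustrative aside.] The unix hunks above choose between `F_SETLK` (fail immediately) and `F_SETLKW` (block until granted) when taking the SHARED byte range. A minimal sketch of that choice with POSIX advisory locks; `fd`, the offsets, and the function name are assumptions for illustration:

```c
#include <fcntl.h>
#include <string.h>

static int takeReadLock(int fd, off_t iOfst, off_t nByte, int bBlock){
  struct flock lk;
  memset(&lk, 0, sizeof(lk));
  lk.l_type = F_RDLCK;               /* shared (read) lock */
  lk.l_whence = SEEK_SET;
  lk.l_start = iOfst;
  lk.l_len = nByte;
  /* F_SETLKW waits out conflicting locks; F_SETLK returns -1 at once */
  return fcntl(fd, bBlock ? F_SETLKW : F_SETLK, &lk);
}
```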
MSDN says: -** -** Minimum supported client: Windows XP [desktop apps | UWP apps] -** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] -*/ -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - { "CreateEvent", (SYSCALL)CreateEvent, 0 }, -#else - { "CreateEvent", (SYSCALL)0, 0 }, -#endif - -#define osCreateEvent ( \ - (HANDLE(WINAPI*) (LPSECURITY_ATTRIBUTES,BOOL,BOOL,LPCSTR)) \ - aSyscall[80].pCurrent \ -) - -/* -** If SQLITE_ENABLE_SETLK_TIMEOUT is defined, we require CancelIo() -** for the case where a timeout expires and a lock request must be -** cancelled. -** -** Minimum supported client: Windows XP [desktop apps | UWP apps] -** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] -*/ -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - { "CancelIo", (SYSCALL)CancelIo, 0 }, -#else - { "CancelIo", (SYSCALL)0, 0 }, -#endif - -#define osCancelIo ((BOOL(WINAPI*)(HANDLE))aSyscall[81].pCurrent) - -#if defined(SQLITE_WIN32_HAS_WIDE) && defined(_WIN32) - { "GetModuleHandleW", (SYSCALL)GetModuleHandleW, 0 }, -#else - { "GetModuleHandleW", (SYSCALL)0, 0 }, -#endif - -#define osGetModuleHandleW ((HMODULE(WINAPI*)(LPCWSTR))aSyscall[82].pCurrent) - -#ifndef _WIN32 - { "getenv", (SYSCALL)getenv, 0 }, -#else - { "getenv", (SYSCALL)0, 0 }, -#endif - -#define osGetenv ((const char *(*)(const char *))aSyscall[83].pCurrent) - -#ifndef _WIN32 - { "getcwd", (SYSCALL)getcwd, 0 }, -#else - { "getcwd", (SYSCALL)0, 0 }, -#endif - -#define osGetcwd ((char*(*)(char*,size_t))aSyscall[84].pCurrent) - -#ifndef _WIN32 - { "readlink", (SYSCALL)readlink, 0 }, -#else - { "readlink", (SYSCALL)0, 0 }, -#endif - -#define osReadlink ((ssize_t(*)(const char*,char*,size_t))aSyscall[85].pCurrent) - -#ifndef _WIN32 - { "lstat", (SYSCALL)lstat, 0 }, -#else - { "lstat", (SYSCALL)0, 0 }, -#endif - -#define osLstat ((int(*)(const char*,struct stat*))aSyscall[86].pCurrent) - -#ifndef _WIN32 - { "__errno", (SYSCALL)__errno, 0 }, -#else - { "__errno", (SYSCALL)0, 0 }, -#endif - -#define osErrno (*((int*(*)(void))aSyscall[87].pCurrent)()) - -#ifndef _WIN32 - { "cygwin_conv_path", (SYSCALL)cygwin_conv_path, 0 }, -#else - { "cygwin_conv_path", (SYSCALL)0, 0 }, -#endif - -#define osCygwin_conv_path ((size_t(*)(unsigned int, \ - const void *, void *, size_t))aSyscall[88].pCurrent) - }; /* End of the overrideable system calls */ /* @@ -48566,7 +48199,6 @@ SQLITE_API int sqlite3_win32_reset_heap(){ } #endif /* SQLITE_WIN32_MALLOC */ -#ifdef _WIN32 /* ** This function outputs the specified (ANSI) string to the Win32 debugger ** (if available). @@ -48609,7 +48241,6 @@ SQLITE_API void sqlite3_win32_write_debug(const char *zBuf, int nBuf){ } #endif } -#endif /* _WIN32 */ /* ** The following routine suspends the current thread for at least ms @@ -48693,9 +48324,7 @@ SQLITE_API int sqlite3_win32_is_nt(void){ } return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2; #elif SQLITE_TEST - return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2 - || osInterlockedCompareExchange(&sqlite3_os_type, 0, 0)==0 - ; + return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2; #else /* ** NOTE: All sub-platforms where the GetVersionEx[AW] functions are @@ -48910,7 +48539,6 @@ SQLITE_PRIVATE void sqlite3MemSetDefault(void){ } #endif /* SQLITE_WIN32_MALLOC */ -#ifdef _WIN32 /* ** Convert a UTF-8 string to Microsoft Unicode. ** @@ -48936,7 +48564,6 @@ static LPWSTR winUtf8ToUnicode(const char *zText){ } return zWideText; } -#endif /* _WIN32 */ /* ** Convert a Microsoft Unicode string to UTF-8. 
@@ -48971,29 +48598,28 @@ static char *winUnicodeToUtf8(LPCWSTR zWideText){ ** Space to hold the returned string is obtained from sqlite3_malloc(). */ static LPWSTR winMbcsToUnicode(const char *zText, int useAnsi){ - int nWideChar; + int nByte; LPWSTR zMbcsText; int codepage = useAnsi ? CP_ACP : CP_OEMCP; - nWideChar = osMultiByteToWideChar(codepage, 0, zText, -1, NULL, - 0); - if( nWideChar==0 ){ + nByte = osMultiByteToWideChar(codepage, 0, zText, -1, NULL, + 0)*sizeof(WCHAR); + if( nByte==0 ){ return 0; } - zMbcsText = sqlite3MallocZero( nWideChar*sizeof(WCHAR) ); + zMbcsText = sqlite3MallocZero( nByte*sizeof(WCHAR) ); if( zMbcsText==0 ){ return 0; } - nWideChar = osMultiByteToWideChar(codepage, 0, zText, -1, zMbcsText, - nWideChar); - if( nWideChar==0 ){ + nByte = osMultiByteToWideChar(codepage, 0, zText, -1, zMbcsText, + nByte); + if( nByte==0 ){ sqlite3_free(zMbcsText); zMbcsText = 0; } return zMbcsText; } -#ifdef _WIN32 /* ** Convert a Microsoft Unicode string to a multi-byte character string, ** using the ANSI or OEM code page. @@ -49021,7 +48647,6 @@ static char *winUnicodeToMbcs(LPCWSTR zWideText, int useAnsi){ } return zText; } -#endif /* _WIN32 */ /* ** Convert a multi-byte character string to UTF-8. @@ -49041,7 +48666,6 @@ static char *winMbcsToUtf8(const char *zText, int useAnsi){ return zTextUtf8; } -#ifdef _WIN32 /* ** Convert a UTF-8 string to a multi-byte character string. ** @@ -49091,7 +48715,6 @@ SQLITE_API char *sqlite3_win32_unicode_to_utf8(LPCWSTR zWideText){ #endif return winUnicodeToUtf8(zWideText); } -#endif /* _WIN32 */ /* ** This is a public wrapper for the winMbcsToUtf8() function. @@ -49109,7 +48732,6 @@ SQLITE_API char *sqlite3_win32_mbcs_to_utf8(const char *zText){ return winMbcsToUtf8(zText, osAreFileApisANSI()); } -#ifdef _WIN32 /* ** This is a public wrapper for the winMbcsToUtf8() function. */ @@ -49234,7 +48856,6 @@ SQLITE_API int sqlite3_win32_set_directory( ){ return sqlite3_win32_set_directory16(type, zValue); } -#endif /* _WIN32 */ /* ** The return value of winGetLastErrorMsg @@ -49783,98 +49404,13 @@ static BOOL winLockFile( ovlp.Offset = offsetLow; ovlp.OffsetHigh = offsetHigh; return osLockFileEx(*phFile, flags, 0, numBytesLow, numBytesHigh, &ovlp); -#ifdef SQLITE_WIN32_HAS_ANSI }else{ return osLockFile(*phFile, offsetLow, offsetHigh, numBytesLow, numBytesHigh); -#endif } #endif } -/* -** Lock a region of nByte bytes starting at offset offset of file hFile. -** Take an EXCLUSIVE lock if parameter bExclusive is true, or a SHARED lock -** otherwise. If nMs is greater than zero and the lock cannot be obtained -** immediately, block for that many ms before giving up. -** -** This function returns SQLITE_OK if the lock is obtained successfully. If -** some other process holds the lock, SQLITE_BUSY is returned if nMs==0, or -** SQLITE_BUSY_TIMEOUT otherwise. Or, if an error occurs, SQLITE_IOERR. 
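[Illustrative aside.] The comment above describes taking a byte-range lock that can wait with a timeout. The immediate, non-blocking half of that pattern needs only documented Win32 calls; this is a hedged sketch, not the removed function itself:

```c
#include <windows.h>

static BOOL tryLockRegion(HANDLE h, DWORD ofst, DWORD nByte, BOOL bExcl){
  OVERLAPPED ovlp;
  DWORD flags = LOCKFILE_FAIL_IMMEDIATELY
              | (bExcl ? LOCKFILE_EXCLUSIVE_LOCK : 0);
  ZeroMemory(&ovlp, sizeof(ovlp));
  ovlp.Offset = ofst;                /* low 32 bits of the region offset */
  /* FALSE here normally means another handle holds a conflicting lock */
  return LockFileEx(h, flags, 0, nByte, 0, &ovlp);
}
```

The timeout variant, visible in the removed code below, additionally opens the file with FILE_FLAG_OVERLAPPED, supplies an event in `ovlp.hEvent`, waits on it with `WaitForSingleObject()`, and cancels the still-pending request if the wait times out.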
-*/ -static int winHandleLockTimeout( - HANDLE hFile, - DWORD offset, - DWORD nByte, - int bExcl, - DWORD nMs -){ - DWORD flags = LOCKFILE_FAIL_IMMEDIATELY | (bExcl?LOCKFILE_EXCLUSIVE_LOCK:0); - int rc = SQLITE_OK; - BOOL ret; - - if( !osIsNT() ){ - ret = winLockFile(&hFile, flags, offset, 0, nByte, 0); - }else{ - OVERLAPPED ovlp; - memset(&ovlp, 0, sizeof(OVERLAPPED)); - ovlp.Offset = offset; - -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - if( nMs!=0 ){ - flags &= ~LOCKFILE_FAIL_IMMEDIATELY; - } - ovlp.hEvent = osCreateEvent(NULL, TRUE, FALSE, NULL); - if( ovlp.hEvent==NULL ){ - return SQLITE_IOERR_LOCK; - } -#endif - - ret = osLockFileEx(hFile, flags, 0, nByte, 0, &ovlp); - -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - /* If SQLITE_ENABLE_SETLK_TIMEOUT is defined, then the file-handle was - ** opened with FILE_FLAG_OVERHEAD specified. In this case, the call to - ** LockFileEx() may fail because the request is still pending. This can - ** happen even if LOCKFILE_FAIL_IMMEDIATELY was specified. - ** - ** If nMs is 0, then LOCKFILE_FAIL_IMMEDIATELY was set in the flags - ** passed to LockFileEx(). In this case, if the operation is pending, - ** block indefinitely until it is finished. - ** - ** Otherwise, wait for up to nMs ms for the operation to finish. nMs - ** may be set to INFINITE. - */ - if( !ret && GetLastError()==ERROR_IO_PENDING ){ - DWORD nDelay = (nMs==0 ? INFINITE : nMs); - DWORD res = osWaitForSingleObject(ovlp.hEvent, nDelay); - if( res==WAIT_OBJECT_0 ){ - ret = TRUE; - }else if( res==WAIT_TIMEOUT ){ -#if SQLITE_ENABLE_SETLK_TIMEOUT==1 - rc = SQLITE_BUSY_TIMEOUT; -#else - rc = SQLITE_BUSY; -#endif - }else{ - /* Some other error has occurred */ - rc = SQLITE_IOERR_LOCK; - } - - /* If it is still pending, cancel the LockFileEx() call. */ - osCancelIo(hFile); - } - - osCloseHandle(ovlp.hEvent); -#endif - } - - if( rc==SQLITE_OK && !ret ){ - rc = SQLITE_BUSY; - } - return rc; -} - /* ** Unlock a file region. */ @@ -49899,23 +49435,13 @@ static BOOL winUnlockFile( ovlp.Offset = offsetLow; ovlp.OffsetHigh = offsetHigh; return osUnlockFileEx(*phFile, 0, numBytesLow, numBytesHigh, &ovlp); -#ifdef SQLITE_WIN32_HAS_ANSI }else{ return osUnlockFile(*phFile, offsetLow, offsetHigh, numBytesLow, numBytesHigh); -#endif } #endif } -/* -** Remove an nByte lock starting at offset iOff from HANDLE h. -*/ -static int winHandleUnlock(HANDLE h, int iOff, int nByte){ - BOOL ret = winUnlockFile(&h, iOff, 0, nByte, 0); - return (ret ? SQLITE_OK : SQLITE_IOERR_UNLOCK); -} - /***************************************************************************** ** The next group of routines implement the I/O methods specified ** by the sqlite3_io_methods object. @@ -49929,69 +49455,65 @@ static int winHandleUnlock(HANDLE h, int iOff, int nByte){ #endif /* -** Seek the file handle h to offset nByte of the file. -** -** If successful, return SQLITE_OK. Or, if an error occurs, return an SQLite -** error code. +** Move the current position of the file handle passed as the first +** argument to offset iOffset within the file. If successful, return 0. +** Otherwise, set pFile->lastErrno and return non-zero. */ -static int winHandleSeek(HANDLE h, sqlite3_int64 iOffset){ - int rc = SQLITE_OK; /* Return value */ - +static int winSeekFile(winFile *pFile, sqlite3_int64 iOffset){ #if !SQLITE_OS_WINRT LONG upperBits; /* Most sig. 32 bits of new offset */ LONG lowerBits; /* Least sig. 
32 bits of new offset */ DWORD dwRet; /* Value returned by SetFilePointer() */ + DWORD lastErrno; /* Value returned by GetLastError() */ + + OSTRACE(("SEEK file=%p, offset=%lld\n", pFile->h, iOffset)); upperBits = (LONG)((iOffset>>32) & 0x7fffffff); lowerBits = (LONG)(iOffset & 0xffffffff); - dwRet = osSetFilePointer(h, lowerBits, &upperBits, FILE_BEGIN); - /* API oddity: If successful, SetFilePointer() returns a dword ** containing the lower 32-bits of the new file-offset. Or, if it fails, ** it returns INVALID_SET_FILE_POINTER. However according to MSDN, ** INVALID_SET_FILE_POINTER may also be a valid new offset. So to determine ** whether an error has actually occurred, it is also necessary to call - ** GetLastError(). */ - if( dwRet==INVALID_SET_FILE_POINTER ){ - DWORD lastErrno = osGetLastError(); - if( lastErrno!=NO_ERROR ){ - rc = SQLITE_IOERR_SEEK; - } + ** GetLastError(). + */ + dwRet = osSetFilePointer(pFile->h, lowerBits, &upperBits, FILE_BEGIN); + + if( (dwRet==INVALID_SET_FILE_POINTER + && ((lastErrno = osGetLastError())!=NO_ERROR)) ){ + pFile->lastErrno = lastErrno; + winLogError(SQLITE_IOERR_SEEK, pFile->lastErrno, + "winSeekFile", pFile->zPath); + OSTRACE(("SEEK file=%p, rc=SQLITE_IOERR_SEEK\n", pFile->h)); + return 1; } + + OSTRACE(("SEEK file=%p, rc=SQLITE_OK\n", pFile->h)); + return 0; #else - /* This implementation works for WinRT. */ + /* + ** Same as above, except that this implementation works for WinRT. + */ + LARGE_INTEGER x; /* The new offset */ BOOL bRet; /* Value returned by SetFilePointerEx() */ x.QuadPart = iOffset; - bRet = osSetFilePointerEx(h, x, 0, FILE_BEGIN); + bRet = osSetFilePointerEx(pFile->h, x, 0, FILE_BEGIN); if(!bRet){ - rc = SQLITE_IOERR_SEEK; - } -#endif - - OSTRACE(("SEEK file=%p, offset=%lld rc=%s\n", h, iOffset, sqlite3ErrName(rc))); - return rc; -} - -/* -** Move the current position of the file handle passed as the first -** argument to offset iOffset within the file. If successful, return 0. -** Otherwise, set pFile->lastErrno and return non-zero. -*/ -static int winSeekFile(winFile *pFile, sqlite3_int64 iOffset){ - int rc; - - rc = winHandleSeek(pFile->h, iOffset); - if( rc!=SQLITE_OK ){ pFile->lastErrno = osGetLastError(); - winLogError(rc, pFile->lastErrno, "winSeekFile", pFile->zPath); + winLogError(SQLITE_IOERR_SEEK, pFile->lastErrno, + "winSeekFile", pFile->zPath); + OSTRACE(("SEEK file=%p, rc=SQLITE_IOERR_SEEK\n", pFile->h)); + return 1; } - return rc; -} + OSTRACE(("SEEK file=%p, rc=SQLITE_OK\n", pFile->h)); + return 0; +#endif +} #if SQLITE_MAX_MMAP_SIZE>0 /* Forward references to VFS helper methods used for memory mapped files */ @@ -50252,60 +49774,6 @@ static int winWrite( return SQLITE_OK; } -/* -** Truncate the file opened by handle h to nByte bytes in size. -*/ -static int winHandleTruncate(HANDLE h, sqlite3_int64 nByte){ - int rc = SQLITE_OK; /* Return code */ - rc = winHandleSeek(h, nByte); - if( rc==SQLITE_OK ){ - if( 0==osSetEndOfFile(h) ){ - rc = SQLITE_IOERR_TRUNCATE; - } - } - return rc; -} - -/* -** Determine the size in bytes of the file opened by the handle passed as -** the first argument. 
-*/ -static int winHandleSize(HANDLE h, sqlite3_int64 *pnByte){ - int rc = SQLITE_OK; - -#if SQLITE_OS_WINRT - FILE_STANDARD_INFO info; - BOOL b; - b = osGetFileInformationByHandleEx(h, FileStandardInfo, &info, sizeof(info)); - if( b ){ - *pnByte = info.EndOfFile.QuadPart; - }else{ - rc = SQLITE_IOERR_FSTAT; - } -#else - DWORD upperBits = 0; - DWORD lowerBits = 0; - - assert( pnByte ); - lowerBits = osGetFileSize(h, &upperBits); - *pnByte = (((sqlite3_int64)upperBits)<<32) + lowerBits; - if( lowerBits==INVALID_FILE_SIZE && osGetLastError()!=NO_ERROR ){ - rc = SQLITE_IOERR_FSTAT; - } -#endif - - return rc; -} - -/* -** Close the handle passed as the only argument. -*/ -static void winHandleClose(HANDLE h){ - if( h!=INVALID_HANDLE_VALUE ){ - osCloseHandle(h); - } -} - /* ** Truncate an open file to a specified size */ @@ -50561,9 +50029,8 @@ static int winFileSize(sqlite3_file *id, sqlite3_int64 *pSize){ ** Different API routines are called depending on whether or not this ** is Win9x or WinNT. */ -static int winGetReadLock(winFile *pFile, int bBlock){ +static int winGetReadLock(winFile *pFile){ int res; - DWORD mask = ~(bBlock ? LOCKFILE_FAIL_IMMEDIATELY : 0); OSTRACE(("READ-LOCK file=%p, lock=%d\n", pFile->h, pFile->locktype)); if( osIsNT() ){ #if SQLITE_OS_WINCE @@ -50573,7 +50040,7 @@ static int winGetReadLock(winFile *pFile, int bBlock){ */ res = winceLockFile(&pFile->h, SHARED_FIRST, 0, 1, 0); #else - res = winLockFile(&pFile->h, SQLITE_LOCKFILEEX_FLAGS&mask, SHARED_FIRST, 0, + res = winLockFile(&pFile->h, SQLITE_LOCKFILEEX_FLAGS, SHARED_FIRST, 0, SHARED_SIZE, 0); #endif } @@ -50582,7 +50049,7 @@ static int winGetReadLock(winFile *pFile, int bBlock){ int lk; sqlite3_randomness(sizeof(lk), &lk); pFile->sharedLockByte = (short)((lk & 0x7fffffff)%(SHARED_SIZE - 1)); - res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS&mask, + res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS, SHARED_FIRST+pFile->sharedLockByte, 0, 1, 0); } #endif @@ -50677,62 +50144,46 @@ static int winLock(sqlite3_file *id, int locktype){ assert( locktype!=PENDING_LOCK ); assert( locktype!=RESERVED_LOCK || pFile->locktype==SHARED_LOCK ); - /* Lock the PENDING_LOCK byte if we need to acquire an EXCLUSIVE lock or + /* Lock the PENDING_LOCK byte if we need to acquire a PENDING lock or ** a SHARED lock. If we are acquiring a SHARED lock, the acquisition of ** the PENDING_LOCK byte is temporary. */ newLocktype = pFile->locktype; - if( locktype==SHARED_LOCK - || (locktype==EXCLUSIVE_LOCK && pFile->locktype==RESERVED_LOCK) + if( pFile->locktype==NO_LOCK + || (locktype==EXCLUSIVE_LOCK && pFile->locktype<=RESERVED_LOCK) ){ int cnt = 3; - - /* Flags for the LockFileEx() call. This should be an exclusive lock if - ** this call is to obtain EXCLUSIVE, or a shared lock if this call is to - ** obtain SHARED. */ - int flags = LOCKFILE_FAIL_IMMEDIATELY; - if( locktype==EXCLUSIVE_LOCK ){ - flags |= LOCKFILE_EXCLUSIVE_LOCK; - } - while( cnt>0 ){ + while( cnt-->0 && (res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS, + PENDING_BYTE, 0, 1, 0))==0 ){ /* Try 3 times to get the pending lock. This is needed to work ** around problems caused by indexing and/or anti-virus software on ** Windows systems. - ** ** If you are using this code as a model for alternative VFSes, do not - ** copy this retry logic. It is a hack intended for Windows only. */ - res = winLockFile(&pFile->h, flags, PENDING_BYTE, 0, 1, 0); - if( res ) break; - + ** copy this retry logic. It is a hack intended for Windows only. 
+ */ lastErrno = osGetLastError(); OSTRACE(("LOCK-PENDING-FAIL file=%p, count=%d, result=%d\n", - pFile->h, cnt, res - )); - + pFile->h, cnt, res)); if( lastErrno==ERROR_INVALID_HANDLE ){ pFile->lastErrno = lastErrno; rc = SQLITE_IOERR_LOCK; OSTRACE(("LOCK-FAIL file=%p, count=%d, rc=%s\n", - pFile->h, cnt, sqlite3ErrName(rc) - )); + pFile->h, cnt, sqlite3ErrName(rc))); return rc; } - - cnt--; - if( cnt>0 ) sqlite3_win32_sleep(1); + if( cnt ) sqlite3_win32_sleep(1); } gotPendingLock = res; + if( !res ){ + lastErrno = osGetLastError(); + } } /* Acquire a shared lock */ if( locktype==SHARED_LOCK && res ){ assert( pFile->locktype==NO_LOCK ); -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - res = winGetReadLock(pFile, pFile->bBlockOnConnect); -#else - res = winGetReadLock(pFile, 0); -#endif + res = winGetReadLock(pFile); if( res ){ newLocktype = SHARED_LOCK; }else{ @@ -50770,7 +50221,7 @@ static int winLock(sqlite3_file *id, int locktype){ newLocktype = EXCLUSIVE_LOCK; }else{ lastErrno = osGetLastError(); - winGetReadLock(pFile, 0); + winGetReadLock(pFile); } } @@ -50850,7 +50301,7 @@ static int winUnlock(sqlite3_file *id, int locktype){ type = pFile->locktype; if( type>=EXCLUSIVE_LOCK ){ winUnlockFile(&pFile->h, SHARED_FIRST, 0, SHARED_SIZE, 0); - if( locktype==SHARED_LOCK && !winGetReadLock(pFile, 0) ){ + if( locktype==SHARED_LOCK && !winGetReadLock(pFile) ){ /* This should never happen. We should always be able to ** reacquire the read lock */ rc = winLogError(SQLITE_IOERR_UNLOCK, osGetLastError(), @@ -51060,28 +50511,6 @@ static int winFileControl(sqlite3_file *id, int op, void *pArg){ return rc; } #endif - -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - case SQLITE_FCNTL_LOCK_TIMEOUT: { - int iOld = pFile->iBusyTimeout; - int iNew = *(int*)pArg; -#if SQLITE_ENABLE_SETLK_TIMEOUT==1 - pFile->iBusyTimeout = (iNew < 0) ? INFINITE : (DWORD)iNew; -#elif SQLITE_ENABLE_SETLK_TIMEOUT==2 - pFile->iBusyTimeout = (DWORD)(!!iNew); -#else -# error "SQLITE_ENABLE_SETLK_TIMEOUT must be set to 1 or 2" -#endif - *(int*)pArg = iOld; - return SQLITE_OK; - } - case SQLITE_FCNTL_BLOCK_ON_CONNECT: { - int iNew = *(int*)pArg; - pFile->bBlockOnConnect = iNew; - return SQLITE_OK; - } -#endif /* SQLITE_ENABLE_SETLK_TIMEOUT */ - } OSTRACE(("FCNTL file=%p, rc=SQLITE_NOTFOUND\n", pFile->h)); return SQLITE_NOTFOUND; @@ -51162,27 +50591,23 @@ static int winShmMutexHeld(void) { ** ** The following fields are read-only after the object is created: ** +** fid ** zFilename ** ** Either winShmNode.mutex must be held or winShmNode.nRef==0 and ** winShmMutexHeld() is true when reading or writing any other field ** in this structure. ** -** File-handle hSharedShm is used to (a) take the DMS lock, (b) truncate -** the *-shm file if the DMS-locking protocol demands it, and (c) map -** regions of the *-shm file into memory using MapViewOfFile() or -** similar. Other locks are taken by individual clients using the -** winShm.hShm handles. 
*/ struct winShmNode { sqlite3_mutex *mutex; /* Mutex to access this object */ char *zFilename; /* Name of the file */ - HANDLE hSharedShm; /* File handle open on zFilename */ + winFile hFile; /* File handle from winOpen */ - int isUnlocked; /* DMS lock has not yet been obtained */ - int isReadonly; /* True if read-only */ int szRegion; /* Size of shared-memory regions */ int nRegion; /* Size of array apRegion */ + u8 isReadonly; /* True if read-only */ + u8 isUnlocked; /* True if no DMS lock held */ struct ShmRegion { HANDLE hMap; /* File handle from CreateFileMapping */ @@ -51191,6 +50616,7 @@ struct winShmNode { DWORD lastErrno; /* The Windows errno from the last I/O error */ int nRef; /* Number of winShm objects pointing to this */ + winShm *pFirst; /* All winShm objects pointing to this */ winShmNode *pNext; /* Next in list of all winShmNode objects */ #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) u8 nextShmId; /* Next available winShm.id value */ @@ -51206,15 +50632,23 @@ static winShmNode *winShmNodeList = 0; /* ** Structure used internally by this VFS to record the state of an -** open shared memory connection. There is one such structure for each -** winFile open on a wal mode database. +** open shared memory connection. +** +** The following fields are initialized when this object is created and +** are read-only thereafter: +** +** winShm.pShmNode +** winShm.id +** +** All other fields are read/write. The winShm.pShmNode->mutex must be held +** while accessing any read/write fields. */ struct winShm { winShmNode *pShmNode; /* The underlying winShmNode object */ + winShm *pNext; /* Next winShm with the same winShmNode */ + u8 hasMutex; /* True if holding the winShmNode mutex */ u16 sharedMask; /* Mask of shared locks held */ u16 exclMask; /* Mask of exclusive locks held */ - HANDLE hShm; /* File-handle on *-shm file. For locking. */ - int bReadonly; /* True if hShm is opened read-only */ #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) u8 id; /* Id of this connection with its winShmNode */ #endif @@ -51226,6 +50660,50 @@ struct winShm { #define WIN_SHM_BASE ((22+SQLITE_SHM_NLOCK)*4) /* first lock byte */ #define WIN_SHM_DMS (WIN_SHM_BASE+SQLITE_SHM_NLOCK) /* deadman switch */ +/* +** Apply advisory locks for all n bytes beginning at ofst. 
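[Illustrative aside.] Each `ShmRegion` above pairs a file-mapping handle (`hMap`) with its mapped view (`pMap`). The underlying Win32 idiom for one region of `szRegion` bytes, with error handling reduced to the essentials; the names here are invented:

```c
#include <windows.h>

static void *mapOneRegion(HANDLE hFile, DWORD szRegion, HANDLE *phMap){
  void *pMap = NULL;
  *phMap = CreateFileMappingW(hFile, NULL, PAGE_READWRITE, 0, szRegion, NULL);
  if( *phMap ){
    pMap = MapViewOfFile(*phMap, FILE_MAP_READ|FILE_MAP_WRITE, 0, 0, szRegion);
    if( pMap==NULL ){
      CloseHandle(*phMap);           /* view failed: drop the mapping too */
      *phMap = NULL;
    }
  }
  return pMap;  /* release with UnmapViewOfFile(pMap), CloseHandle(hMap) */
}
```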
+*/ +#define WINSHM_UNLCK 1 +#define WINSHM_RDLCK 2 +#define WINSHM_WRLCK 3 +static int winShmSystemLock( + winShmNode *pFile, /* Apply locks to this open shared-memory segment */ + int lockType, /* WINSHM_UNLCK, WINSHM_RDLCK, or WINSHM_WRLCK */ + int ofst, /* Offset to first byte to be locked/unlocked */ + int nByte /* Number of bytes to lock or unlock */ +){ + int rc = 0; /* Result code form Lock/UnlockFileEx() */ + + /* Access to the winShmNode object is serialized by the caller */ + assert( pFile->nRef==0 || sqlite3_mutex_held(pFile->mutex) ); + + OSTRACE(("SHM-LOCK file=%p, lock=%d, offset=%d, size=%d\n", + pFile->hFile.h, lockType, ofst, nByte)); + + /* Release/Acquire the system-level lock */ + if( lockType==WINSHM_UNLCK ){ + rc = winUnlockFile(&pFile->hFile.h, ofst, 0, nByte, 0); + }else{ + /* Initialize the locking parameters */ + DWORD dwFlags = LOCKFILE_FAIL_IMMEDIATELY; + if( lockType == WINSHM_WRLCK ) dwFlags |= LOCKFILE_EXCLUSIVE_LOCK; + rc = winLockFile(&pFile->hFile.h, dwFlags, ofst, 0, nByte, 0); + } + + if( rc!= 0 ){ + rc = SQLITE_OK; + }else{ + pFile->lastErrno = osGetLastError(); + rc = SQLITE_BUSY; + } + + OSTRACE(("SHM-LOCK file=%p, func=%s, errno=%lu, rc=%s\n", + pFile->hFile.h, (lockType == WINSHM_UNLCK) ? "winUnlockFile" : + "winLockFile", pFile->lastErrno, sqlite3ErrName(rc))); + + return rc; +} + /* Forward references to VFS methods */ static int winOpen(sqlite3_vfs*,const char*,sqlite3_file*,int,int*); static int winDelete(sqlite3_vfs *,const char*,int); @@ -51257,7 +50735,11 @@ static void winShmPurge(sqlite3_vfs *pVfs, int deleteFlag){ osGetCurrentProcessId(), i, bRc ? "ok" : "failed")); UNUSED_VARIABLE_VALUE(bRc); } - winHandleClose(p->hSharedShm); + if( p->hFile.h!=NULL && p->hFile.h!=INVALID_HANDLE_VALUE ){ + SimulateIOErrorBenign(1); + winClose((sqlite3_file *)&p->hFile); + SimulateIOErrorBenign(0); + } if( deleteFlag ){ SimulateIOErrorBenign(1); sqlite3BeginBenignMalloc(); @@ -51275,239 +50757,42 @@ static void winShmPurge(sqlite3_vfs *pVfs, int deleteFlag){ } /* -** The DMS lock has not yet been taken on the shm file associated with -** pShmNode. Take the lock. Truncate the *-shm file if required. -** Return SQLITE_OK if successful, or an SQLite error code otherwise. +** The DMS lock has not yet been taken on shm file pShmNode. Attempt to +** take it now. Return SQLITE_OK if successful, or an SQLite error +** code otherwise. +** +** If the DMS cannot be locked because this is a readonly_shm=1 +** connection and no other process already holds a lock, return +** SQLITE_READONLY_CANTINIT and set pShmNode->isUnlocked=1. */ -static int winLockSharedMemory(winShmNode *pShmNode, DWORD nMs){ - HANDLE h = pShmNode->hSharedShm; - int rc = SQLITE_OK; +static int winLockSharedMemory(winShmNode *pShmNode){ + int rc = winShmSystemLock(pShmNode, WINSHM_WRLCK, WIN_SHM_DMS, 1); - assert( sqlite3_mutex_held(pShmNode->mutex) ); - rc = winHandleLockTimeout(h, WIN_SHM_DMS, 1, 1, 0); if( rc==SQLITE_OK ){ - /* We have an EXCLUSIVE lock on the DMS byte. This means that this - ** is the first process to open the file. Truncate it to zero bytes - ** in this case. 
*/ if( pShmNode->isReadonly ){ - rc = SQLITE_READONLY_CANTINIT; - }else{ - rc = winHandleTruncate(h, 0); + pShmNode->isUnlocked = 1; + winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); + return SQLITE_READONLY_CANTINIT; + }else if( winTruncate((sqlite3_file*)&pShmNode->hFile, 0) ){ + winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); + return winLogError(SQLITE_IOERR_SHMOPEN, osGetLastError(), + "winLockSharedMemory", pShmNode->zFilename); } - - /* Release the EXCLUSIVE lock acquired above. */ - winUnlockFile(&h, WIN_SHM_DMS, 0, 1, 0); - }else if( (rc & 0xFF)==SQLITE_BUSY ){ - rc = SQLITE_OK; } if( rc==SQLITE_OK ){ - /* Take a SHARED lock on the DMS byte. */ - rc = winHandleLockTimeout(h, WIN_SHM_DMS, 1, 0, nMs); - if( rc==SQLITE_OK ){ - pShmNode->isUnlocked = 0; - } + winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); } - return rc; + return winShmSystemLock(pShmNode, WINSHM_RDLCK, WIN_SHM_DMS, 1); } - -/* -** Convert a UTF-8 filename into whatever form the underlying -** operating system wants filenames in. Space to hold the result -** is obtained from malloc and must be freed by the calling -** function -** -** On Cygwin, 3 possible input forms are accepted: -** - If the filename starts with ":/" or ":\", -** it is converted to UTF-16 as-is. -** - If the filename contains '/', it is assumed to be a -** Cygwin absolute path, it is converted to a win32 -** absolute path in UTF-16. -** - Otherwise it must be a filename only, the win32 filename -** is returned in UTF-16. -** Note: If the function cygwin_conv_path() fails, only -** UTF-8 -> UTF-16 conversion will be done. This can only -** happen when the file path >32k, in which case winUtf8ToUnicode() -** will fail too. -*/ -static void *winConvertFromUtf8Filename(const char *zFilename){ - void *zConverted = 0; - if( osIsNT() ){ -#ifdef __CYGWIN__ - int nChar; - LPWSTR zWideFilename; - - if( osCygwin_conv_path && !(winIsDriveLetterAndColon(zFilename) - && winIsDirSep(zFilename[2])) ){ - i64 nByte; - int convertflag = CCP_POSIX_TO_WIN_W; - if( !strchr(zFilename, '/') ) convertflag |= CCP_RELATIVE; - nByte = (i64)osCygwin_conv_path(convertflag, - zFilename, 0, 0); - if( nByte>0 ){ - zConverted = sqlite3MallocZero(12+(u64)nByte); - if ( zConverted==0 ){ - return zConverted; - } - zWideFilename = zConverted; - /* Filenames should be prefixed, except when converted - * full path already starts with "\\?\". */ - if( osCygwin_conv_path(convertflag, zFilename, - zWideFilename+4, nByte)==0 ){ - if( (convertflag&CCP_RELATIVE) ){ - memmove(zWideFilename, zWideFilename+4, nByte); - }else if( memcmp(zWideFilename+4, L"\\\\", 4) ){ - memcpy(zWideFilename, L"\\\\?\\", 8); - }else if( zWideFilename[6]!='?' 
){ - memmove(zWideFilename+6, zWideFilename+4, nByte); - memcpy(zWideFilename, L"\\\\?\\UNC", 14); - }else{ - memmove(zWideFilename, zWideFilename+4, nByte); - } - return zConverted; - } - sqlite3_free(zConverted); - } - } - nChar = osMultiByteToWideChar(CP_UTF8, 0, zFilename, -1, NULL, 0); - if( nChar==0 ){ - return 0; - } - zWideFilename = sqlite3MallocZero( nChar*sizeof(WCHAR)+12 ); - if( zWideFilename==0 ){ - return 0; - } - nChar = osMultiByteToWideChar(CP_UTF8, 0, zFilename, -1, - zWideFilename, nChar); - if( nChar==0 ){ - sqlite3_free(zWideFilename); - zWideFilename = 0; - }else if( nChar>MAX_PATH - && winIsDriveLetterAndColon(zFilename) - && winIsDirSep(zFilename[2]) ){ - memmove(zWideFilename+4, zWideFilename, nChar*sizeof(WCHAR)); - zWideFilename[2] = '\\'; - memcpy(zWideFilename, L"\\\\?\\", 8); - }else if( nChar>MAX_PATH - && winIsDirSep(zFilename[0]) && winIsDirSep(zFilename[1]) - && zFilename[2] != '?' ){ - memmove(zWideFilename+6, zWideFilename, nChar*sizeof(WCHAR)); - memcpy(zWideFilename, L"\\\\?\\UNC", 14); - } - zConverted = zWideFilename; -#else - zConverted = winUtf8ToUnicode(zFilename); -#endif /* __CYGWIN__ */ - } -#if defined(SQLITE_WIN32_HAS_ANSI) && defined(_WIN32) - else{ - zConverted = winUtf8ToMbcs(zFilename, osAreFileApisANSI()); - } -#endif - /* caller will handle out of memory */ - return zConverted; -} - -/* -** This function is used to open a handle on a *-shm file. -** -** If SQLITE_ENABLE_SETLK_TIMEOUT is defined at build time, then the file -** is opened with FILE_FLAG_OVERLAPPED specified. If not, it is not. -*/ -static int winHandleOpen( - const char *zUtf8, /* File to open */ - int *pbReadonly, /* IN/OUT: True for readonly handle */ - HANDLE *ph /* OUT: New HANDLE for file */ -){ - int rc = SQLITE_OK; - void *zConverted = 0; - int bReadonly = *pbReadonly; - HANDLE h = INVALID_HANDLE_VALUE; - -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - const DWORD flag_overlapped = FILE_FLAG_OVERLAPPED; -#else - const DWORD flag_overlapped = 0; -#endif - - /* Convert the filename to the system encoding. */ - zConverted = winConvertFromUtf8Filename(zUtf8); - if( zConverted==0 ){ - OSTRACE(("OPEN name=%s, rc=SQLITE_IOERR_NOMEM", zUtf8)); - rc = SQLITE_IOERR_NOMEM_BKPT; - goto winopenfile_out; - } - - /* Ensure the file we are trying to open is not actually a directory. */ - if( winIsDir(zConverted) ){ - OSTRACE(("OPEN name=%s, rc=SQLITE_CANTOPEN_ISDIR", zUtf8)); - rc = SQLITE_CANTOPEN_ISDIR; - goto winopenfile_out; - } - - /* TODO: platforms. - ** TODO: retry-on-ioerr. - */ - if( osIsNT() ){ -#if SQLITE_OS_WINRT - CREATEFILE2_EXTENDED_PARAMETERS extendedParameters; - memset(&extendedParameters, 0, sizeof(extendedParameters)); - extendedParameters.dwSize = sizeof(extendedParameters); - extendedParameters.dwFileAttributes = FILE_ATTRIBUTE_NORMAL; - extendedParameters.dwFileFlags = flag_overlapped; - extendedParameters.dwSecurityQosFlags = SECURITY_ANONYMOUS; - h = osCreateFile2((LPCWSTR)zConverted, - (GENERIC_READ | (bReadonly ? 0 : GENERIC_WRITE)),/* dwDesiredAccess */ - FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ - OPEN_ALWAYS, /* dwCreationDisposition */ - &extendedParameters - ); -#else - h = osCreateFileW((LPCWSTR)zConverted, /* lpFileName */ - (GENERIC_READ | (bReadonly ? 
0 : GENERIC_WRITE)), /* dwDesiredAccess */ - FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ - NULL, /* lpSecurityAttributes */ - OPEN_ALWAYS, /* dwCreationDisposition */ - FILE_ATTRIBUTE_NORMAL|flag_overlapped, - NULL - ); -#endif - }else{ - /* Due to pre-processor directives earlier in this file, - ** SQLITE_WIN32_HAS_ANSI is always defined if osIsNT() is false. */ -#ifdef SQLITE_WIN32_HAS_ANSI - h = osCreateFileA((LPCSTR)zConverted, - (GENERIC_READ | (bReadonly ? 0 : GENERIC_WRITE)), /* dwDesiredAccess */ - FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ - NULL, /* lpSecurityAttributes */ - OPEN_ALWAYS, /* dwCreationDisposition */ - FILE_ATTRIBUTE_NORMAL|flag_overlapped, - NULL - ); -#endif - } - - if( h==INVALID_HANDLE_VALUE ){ - if( bReadonly==0 ){ - bReadonly = 1; - rc = winHandleOpen(zUtf8, &bReadonly, &h); - }else{ - rc = SQLITE_CANTOPEN_BKPT; - } - } - - winopenfile_out: - sqlite3_free(zConverted); - *pbReadonly = bReadonly; - *ph = h; - return rc; -} - - /* ** Open the shared-memory area associated with database file pDbFd. +** +** When opening a new shared-memory file, if no other instances of that +** file are currently open, in this process or in other processes, then +** the file must be truncated to zero length or have its header cleared. */ static int winOpenSharedMemory(winFile *pDbFd){ struct winShm *p; /* The connection to be opened */ @@ -51519,83 +50804,98 @@ static int winOpenSharedMemory(winFile *pDbFd){ assert( pDbFd->pShm==0 ); /* Not previously opened */ /* Allocate space for the new sqlite3_shm object. Also speculatively - ** allocate space for a new winShmNode and filename. */ + ** allocate space for a new winShmNode and filename. + */ p = sqlite3MallocZero( sizeof(*p) ); if( p==0 ) return SQLITE_IOERR_NOMEM_BKPT; nName = sqlite3Strlen30(pDbFd->zPath); - pNew = sqlite3MallocZero( sizeof(*pShmNode) + (i64)nName + 17 ); + pNew = sqlite3MallocZero( sizeof(*pShmNode) + nName + 17 ); if( pNew==0 ){ sqlite3_free(p); return SQLITE_IOERR_NOMEM_BKPT; } pNew->zFilename = (char*)&pNew[1]; - pNew->hSharedShm = INVALID_HANDLE_VALUE; - pNew->isUnlocked = 1; sqlite3_snprintf(nName+15, pNew->zFilename, "%s-shm", pDbFd->zPath); sqlite3FileSuffix3(pDbFd->zPath, pNew->zFilename); - /* Open a file-handle on the *-shm file for this connection. This file-handle - ** is only used for locking. The mapping of the *-shm file is created using - ** the shared file handle in winShmNode.hSharedShm. */ - p->bReadonly = sqlite3_uri_boolean(pDbFd->zPath, "readonly_shm", 0); - rc = winHandleOpen(pNew->zFilename, &p->bReadonly, &p->hShm); - /* Look to see if there is an existing winShmNode that can be used. - ** If no matching winShmNode currently exists, then create a new one. */ + ** If no matching winShmNode currently exists, create a new one. + */ winShmEnterMutex(); for(pShmNode = winShmNodeList; pShmNode; pShmNode=pShmNode->pNext){ /* TBD need to come up with better match here. Perhaps - ** use FILE_ID_BOTH_DIR_INFO Structure. */ + ** use FILE_ID_BOTH_DIR_INFO Structure. + */ if( sqlite3StrICmp(pShmNode->zFilename, pNew->zFilename)==0 ) break; } - if( pShmNode==0 ){ - pShmNode = pNew; + if( pShmNode ){ + sqlite3_free(pNew); + }else{ + int inFlags = SQLITE_OPEN_WAL; + int outFlags = 0; + + pShmNode = pNew; + pNew = 0; + ((winFile*)(&pShmNode->hFile))->h = INVALID_HANDLE_VALUE; + pShmNode->pNext = winShmNodeList; + winShmNodeList = pShmNode; - /* Allocate a mutex for this winShmNode object, if one is required. 
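**
** The search over winShmNodeList above is the usual find-or-create
** pattern for a process-global shared object.  A distilled sketch of
** the shape (the names Node, globalList and allocateNode are
** hypothetical stand-ins, not SQLite APIs):
*/
#if 0  /* illustrative sketch only */
typedef struct Node Node;
struct Node { char *zName; int nRef; Node *pNext; };
static Node *globalList = 0;
static Node *allocateNode(const char *zName);   /* hypothetical */
static Node *nodeFindOrCreate(const char *zName){
  Node *p;
  winShmEnterMutex();                    /* global list is mutex-protected */
  for(p=globalList; p; p=p->pNext){
    if( sqlite3StrICmp(p->zName, zName)==0 ) break;  /* reuse a match */
  }
  if( p==0 && (p = allocateNode(zName))!=0 ){
    p->pNext = globalList;               /* publish while still locked */
    globalList = p;
  }
  if( p ) p->nRef++;                     /* ref-counted under the mutex */
  winShmLeaveMutex();
  return p;
}
#endif
/*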
*/ if( sqlite3GlobalConfig.bCoreMutex ){ pShmNode->mutex = sqlite3_mutex_alloc(SQLITE_MUTEX_FAST); - if( pShmNode->mutex==0 ) rc = SQLITE_IOERR_NOMEM_BKPT; - } - - /* Open a file-handle to use for mappings, and for the DMS lock. */ - if( rc==SQLITE_OK ){ - HANDLE h = INVALID_HANDLE_VALUE; - pShmNode->isReadonly = p->bReadonly; - rc = winHandleOpen(pNew->zFilename, &pShmNode->isReadonly, &h); - pShmNode->hSharedShm = h; - } - - /* If successful, link the new winShmNode into the global list. If an - ** error occurred, free the object. */ - if( rc==SQLITE_OK ){ - pShmNode->pNext = winShmNodeList; - winShmNodeList = pShmNode; - pNew = 0; - }else{ - sqlite3_mutex_free(pShmNode->mutex); - if( pShmNode->hSharedShm!=INVALID_HANDLE_VALUE ){ - osCloseHandle(pShmNode->hSharedShm); + if( pShmNode->mutex==0 ){ + rc = SQLITE_IOERR_NOMEM_BKPT; + goto shm_open_err; } } + + if( 0==sqlite3_uri_boolean(pDbFd->zPath, "readonly_shm", 0) ){ + inFlags |= SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE; + }else{ + inFlags |= SQLITE_OPEN_READONLY; + } + rc = winOpen(pDbFd->pVfs, pShmNode->zFilename, + (sqlite3_file*)&pShmNode->hFile, + inFlags, &outFlags); + if( rc!=SQLITE_OK ){ + rc = winLogError(rc, osGetLastError(), "winOpenShm", + pShmNode->zFilename); + goto shm_open_err; + } + if( outFlags==SQLITE_OPEN_READONLY ) pShmNode->isReadonly = 1; + + rc = winLockSharedMemory(pShmNode); + if( rc!=SQLITE_OK && rc!=SQLITE_READONLY_CANTINIT ) goto shm_open_err; } - /* If no error has occurred, link the winShm object to the winShmNode and - ** the winShm to pDbFd. */ - if( rc==SQLITE_OK ){ - p->pShmNode = pShmNode; - pShmNode->nRef++; + /* Make the new connection a child of the winShmNode */ + p->pShmNode = pShmNode; #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) - p->id = pShmNode->nextShmId++; + p->id = pShmNode->nextShmId++; #endif - pDbFd->pShm = p; - }else if( p ){ - winHandleClose(p->hShm); - sqlite3_free(p); - } - - assert( rc!=SQLITE_OK || pShmNode->isUnlocked==0 || pShmNode->nRegion==0 ); + pShmNode->nRef++; + pDbFd->pShm = p; winShmLeaveMutex(); + + /* The reference count on pShmNode has already been incremented under + ** the cover of the winShmEnterMutex() mutex and the pointer from the + ** new (struct winShm) object to the pShmNode has been set. All that is + ** left to do is to link the new object into the linked list starting + ** at pShmNode->pFirst. This must be done while holding the pShmNode->mutex + ** mutex. 
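**
** The matching removal in winShmUnmap() below uses the complementary
** pointer-to-pointer walk, which handles the head of the list and
** interior nodes with a single code path.  Both halves side by side:
*/
#if 0  /* illustrative sketch only */
static void shmListLink(winShmNode *pNode, winShm *p){
  sqlite3_mutex_enter(pNode->mutex);
  p->pNext = pNode->pFirst;        /* push onto the head of the list */
  pNode->pFirst = p;
  sqlite3_mutex_leave(pNode->mutex);
}
static void shmListUnlink(winShmNode *pNode, winShm *p){
  winShm **pp;
  sqlite3_mutex_enter(pNode->mutex);
  for(pp=&pNode->pFirst; (*pp)!=p; pp=&(*pp)->pNext){}
  *pp = p->pNext;                  /* splice out, head or interior alike */
  sqlite3_mutex_leave(pNode->mutex);
}
#endif
/*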
+ */ + sqlite3_mutex_enter(pShmNode->mutex); + p->pNext = pShmNode->pFirst; + pShmNode->pFirst = p; + sqlite3_mutex_leave(pShmNode->mutex); + return rc; + + /* Jump here on any error */ +shm_open_err: + winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); + winShmPurge(pDbFd->pVfs, 0); /* This call frees pShmNode if required */ + sqlite3_free(p); sqlite3_free(pNew); + winShmLeaveMutex(); return rc; } @@ -51610,19 +50910,27 @@ static int winShmUnmap( winFile *pDbFd; /* Database holding shared-memory */ winShm *p; /* The connection to be closed */ winShmNode *pShmNode; /* The underlying shared-memory file */ + winShm **pp; /* For looping over sibling connections */ pDbFd = (winFile*)fd; p = pDbFd->pShm; if( p==0 ) return SQLITE_OK; - if( p->hShm!=INVALID_HANDLE_VALUE ){ - osCloseHandle(p->hShm); - } - pShmNode = p->pShmNode; - winShmEnterMutex(); + + /* Remove connection p from the set of connections associated + ** with pShmNode */ + sqlite3_mutex_enter(pShmNode->mutex); + for(pp=&pShmNode->pFirst; (*pp)!=p; pp = &(*pp)->pNext){} + *pp = p->pNext; + + /* Free the connection p */ + sqlite3_free(p); + pDbFd->pShm = 0; + sqlite3_mutex_leave(pShmNode->mutex); /* If pShmNode->nRef has reached 0, then close the underlying - ** shared-memory file, too. */ + ** shared-memory file, too */ + winShmEnterMutex(); assert( pShmNode->nRef>0 ); pShmNode->nRef--; if( pShmNode->nRef==0 ){ @@ -51630,9 +50938,6 @@ static int winShmUnmap( } winShmLeaveMutex(); - /* Free the connection p */ - sqlite3_free(p); - pDbFd->pShm = 0; return SQLITE_OK; } @@ -51647,9 +50952,10 @@ static int winShmLock( ){ winFile *pDbFd = (winFile*)fd; /* Connection holding shared memory */ winShm *p = pDbFd->pShm; /* The shared memory being locked */ + winShm *pX; /* For looping over all siblings */ winShmNode *pShmNode; int rc = SQLITE_OK; /* Result code */ - u16 mask = (u16)((1U<<(ofst+n)) - (1U<pShmNode; @@ -51663,81 +50969,85 @@ static int winShmLock( || flags==(SQLITE_SHM_UNLOCK | SQLITE_SHM_EXCLUSIVE) ); assert( n==1 || (flags & SQLITE_SHM_EXCLUSIVE)!=0 ); - /* Check that, if this to be a blocking lock, no locks that occur later - ** in the following list than the lock being obtained are already held: - ** - ** 1. Recovery lock (ofst==2). - ** 2. Checkpointer lock (ofst==1). - ** 3. Write lock (ofst==0). - ** 4. Read locks (ofst>=3 && ofstexclMask|p->sharedMask); - assert( (flags & SQLITE_SHM_UNLOCK) || pDbFd->iBusyTimeout==0 || ( - (ofst!=2 || lockMask==0) - && (ofst!=1 || lockMask==0 || lockMask==2) - && (ofst!=0 || lockMask<3) - && (ofst<3 || lockMask<(1<1 || mask==(1<mutex); + if( flags & SQLITE_SHM_UNLOCK ){ + u16 allMask = 0; /* Mask of locks held by siblings */ - /* Check if there is any work to do. There are three cases: - ** - ** a) An unlock operation where there are locks to unlock, - ** b) An shared lock where the requested lock is not already held - ** c) An exclusive lock where the requested lock is not already held - ** - ** The SQLite core never requests an exclusive lock that it already holds. - ** This is assert()ed immediately below. 
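**
** The "mask" computed above has one bit set for each lock slot in the
** half-open range [ofst, ofst+n).  Worked example with ofst==3, n==2:
**
**   (1<<(3+2)) - (1<<3)  ==  32 - 8  ==  0x18  ==  binary 11000
**
** i.e. bits 3 and 4, which are then compared against the sharedMask
** and exclMask bitmaps of this connection and of its siblings.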
*/ - assert( flags!=(SQLITE_SHM_EXCLUSIVE|SQLITE_SHM_LOCK) - || 0==(p->exclMask & mask) - ); - if( ((flags & SQLITE_SHM_UNLOCK) && ((p->exclMask|p->sharedMask) & mask)) - || (flags==(SQLITE_SHM_SHARED|SQLITE_SHM_LOCK) && 0==(p->sharedMask & mask)) - || (flags==(SQLITE_SHM_EXCLUSIVE|SQLITE_SHM_LOCK)) - ){ + /* See if any siblings hold this same lock */ + for(pX=pShmNode->pFirst; pX; pX=pX->pNext){ + if( pX==p ) continue; + assert( (pX->exclMask & (p->exclMask|p->sharedMask))==0 ); + allMask |= pX->sharedMask; + } - if( flags & SQLITE_SHM_UNLOCK ){ - /* Case (a) - unlock. */ - - assert( (p->exclMask & p->sharedMask)==0 ); - assert( !(flags & SQLITE_SHM_EXCLUSIVE) || (p->exclMask & mask)==mask ); - assert( !(flags & SQLITE_SHM_SHARED) || (p->sharedMask & mask)==mask ); - - rc = winHandleUnlock(p->hShm, ofst+WIN_SHM_BASE, n); - - /* If successful, also clear the bits in sharedMask/exclMask */ - if( rc==SQLITE_OK ){ - p->exclMask = (p->exclMask & ~mask); - p->sharedMask = (p->sharedMask & ~mask); - } + /* Unlock the system-level locks */ + if( (mask & allMask)==0 ){ + rc = winShmSystemLock(pShmNode, WINSHM_UNLCK, ofst+WIN_SHM_BASE, n); }else{ - int bExcl = ((flags & SQLITE_SHM_EXCLUSIVE) ? 1 : 0); - DWORD nMs = winFileBusyTimeout(pDbFd); - rc = winHandleLockTimeout(p->hShm, ofst+WIN_SHM_BASE, n, bExcl, nMs); + rc = SQLITE_OK; + } + + /* Undo the local locks */ + if( rc==SQLITE_OK ){ + p->exclMask &= ~mask; + p->sharedMask &= ~mask; + } + }else if( flags & SQLITE_SHM_SHARED ){ + u16 allShared = 0; /* Union of locks held by connections other than "p" */ + + /* Find out which shared locks are already held by sibling connections. + ** If any sibling already holds an exclusive lock, go ahead and return + ** SQLITE_BUSY. + */ + for(pX=pShmNode->pFirst; pX; pX=pX->pNext){ + if( (pX->exclMask & mask)!=0 ){ + rc = SQLITE_BUSY; + break; + } + allShared |= pX->sharedMask; + } + + /* Get shared locks at the system level, if necessary */ + if( rc==SQLITE_OK ){ + if( (allShared & mask)==0 ){ + rc = winShmSystemLock(pShmNode, WINSHM_RDLCK, ofst+WIN_SHM_BASE, n); + }else{ + rc = SQLITE_OK; + } + } + + /* Get the local shared locks */ + if( rc==SQLITE_OK ){ + p->sharedMask |= mask; + } + }else{ + /* Make sure no sibling connections hold locks that will block this + ** lock. If any do, return SQLITE_BUSY right away. + */ + for(pX=pShmNode->pFirst; pX; pX=pX->pNext){ + if( (pX->exclMask & mask)!=0 || (pX->sharedMask & mask)!=0 ){ + rc = SQLITE_BUSY; + break; + } + } + + /* Get the exclusive locks at the system level. Then if successful + ** also mark the local connection as being locked. + */ + if( rc==SQLITE_OK ){ + rc = winShmSystemLock(pShmNode, WINSHM_WRLCK, ofst+WIN_SHM_BASE, n); if( rc==SQLITE_OK ){ - if( bExcl ){ - p->exclMask = (p->exclMask | mask); - }else{ - p->sharedMask = (p->sharedMask | mask); - } + assert( (p->sharedMask & mask)==0 ); + p->exclMask |= mask; } } } - - OSTRACE(( - "SHM-LOCK(%d,%d,%d) pid=%lu, id=%d, sharedMask=%03x, exclMask=%03x," - " rc=%s\n", - ofst, n, flags, - osGetCurrentProcessId(), p->id, p->sharedMask, p->exclMask, - sqlite3ErrName(rc)) - ); + sqlite3_mutex_leave(pShmNode->mutex); + OSTRACE(("SHM-LOCK pid=%lu, id=%d, sharedMask=%03x, exclMask=%03x, rc=%s\n", + osGetCurrentProcessId(), p->id, p->sharedMask, p->exclMask, + sqlite3ErrName(rc))); return rc; } @@ -51799,15 +51109,13 @@ static int winShmMap( sqlite3_mutex_enter(pShmNode->mutex); if( pShmNode->isUnlocked ){ - /* Take the DMS lock. 
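**
** Note the structure of the rewritten winShmLock() above: byte-range
** locks on the file are treated as a per-process resource, so before
** touching the OS the routine consults the lock masks of all sibling
** connections under pShmNode->mutex.  A shared lock is forwarded to
** the OS only when no sibling already holds it, and an in-process
** conflict fails fast with SQLITE_BUSY and no system call.  The core
** decision for the shared-lock case, distilled:
*/
#if 0  /* illustrative sketch only */
/* Returns -1 on a sibling conflict (caller maps this to SQLITE_BUSY),
** 1 if the OS-level lock must actually be taken, or 0 if a sibling
** already holds the system lock on our behalf. */
static int shmSharedLockDecision(winShm *p, u16 mask){
  winShm *pX;
  u16 allShared = 0;
  for(pX=p->pShmNode->pFirst; pX; pX=pX->pNext){
    if( pX->exclMask & mask ) return -1;
    allShared |= pX->sharedMask;
  }
  return (allShared & mask)==0;
}
#endif
/*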
*/ - assert( pShmNode->nRegion==0 ); - rc = winLockSharedMemory(pShmNode, winFileBusyTimeout(pDbFd)); + rc = winLockSharedMemory(pShmNode); if( rc!=SQLITE_OK ) goto shmpage_out; + pShmNode->isUnlocked = 0; } - assert( szRegion==pShmNode->szRegion || pShmNode->nRegion==0 ); + if( pShmNode->nRegion<=iRegion ){ - HANDLE hShared = pShmNode->hSharedShm; struct ShmRegion *apNew; /* New aRegion[] array */ int nByte = (iRegion+1)*szRegion; /* Minimum required file size */ sqlite3_int64 sz; /* Current size of wal-index file */ @@ -51818,9 +51126,10 @@ static int winShmMap( ** Check to see if it has been allocated (i.e. if the wal-index file is ** large enough to contain the requested region). */ - rc = winHandleSize(hShared, &sz); + rc = winFileSize((sqlite3_file *)&pShmNode->hFile, &sz); if( rc!=SQLITE_OK ){ - rc = winLogError(rc, osGetLastError(), "winShmMap1", pDbFd->zPath); + rc = winLogError(SQLITE_IOERR_SHMSIZE, osGetLastError(), + "winShmMap1", pDbFd->zPath); goto shmpage_out; } @@ -51829,17 +51138,19 @@ static int winShmMap( ** zero, exit early. *pp will be set to NULL and SQLITE_OK returned. ** ** Alternatively, if isWrite is non-zero, use ftruncate() to allocate - ** the requested memory region. */ + ** the requested memory region. + */ if( !isWrite ) goto shmpage_out; - rc = winHandleTruncate(hShared, nByte); + rc = winTruncate((sqlite3_file *)&pShmNode->hFile, nByte); if( rc!=SQLITE_OK ){ - rc = winLogError(rc, osGetLastError(), "winShmMap2", pDbFd->zPath); + rc = winLogError(SQLITE_IOERR_SHMSIZE, osGetLastError(), + "winShmMap2", pDbFd->zPath); goto shmpage_out; } } /* Map the requested memory region into this processes address space. */ - apNew = (struct ShmRegion*)sqlite3_realloc64( + apNew = (struct ShmRegion *)sqlite3_realloc64( pShmNode->aRegion, (iRegion+1)*sizeof(apNew[0]) ); if( !apNew ){ @@ -51858,13 +51169,18 @@ static int winShmMap( void *pMap = 0; /* Mapped memory region */ #if SQLITE_OS_WINRT - hMap = osCreateFileMappingFromApp(hShared, NULL, protect, nByte, NULL); + hMap = osCreateFileMappingFromApp(pShmNode->hFile.h, + NULL, protect, nByte, NULL + ); #elif defined(SQLITE_WIN32_HAS_WIDE) - hMap = osCreateFileMappingW(hShared, NULL, protect, 0, nByte, NULL); + hMap = osCreateFileMappingW(pShmNode->hFile.h, + NULL, protect, 0, nByte, NULL + ); #elif defined(SQLITE_WIN32_HAS_ANSI) && SQLITE_WIN32_CREATEFILEMAPPINGA - hMap = osCreateFileMappingA(hShared, NULL, protect, 0, nByte, NULL); + hMap = osCreateFileMappingA(pShmNode->hFile.h, + NULL, protect, 0, nByte, NULL + ); #endif - OSTRACE(("SHM-MAP-CREATE pid=%lu, region=%d, size=%d, rc=%s\n", osGetCurrentProcessId(), pShmNode->nRegion, nByte, hMap ? "ok" : "failed")); @@ -51907,9 +51223,7 @@ shmpage_out: }else{ *pp = 0; } - if( pShmNode->isReadonly && rc==SQLITE_OK ){ - rc = SQLITE_READONLY; - } + if( pShmNode->isReadonly && rc==SQLITE_OK ) rc = SQLITE_READONLY; sqlite3_mutex_leave(pShmNode->mutex); return rc; } @@ -52229,6 +51543,47 @@ static winVfsAppData winNolockAppData = { ** sqlite3_vfs object. */ +#if defined(__CYGWIN__) +/* +** Convert a filename from whatever the underlying operating system +** supports for filenames into UTF-8. Space to hold the result is +** obtained from malloc and must be freed by the calling function. 
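**
** Conversions in both directions follow the usual two-call Win32
** pattern: one probe call to learn the required output size, an
** allocation, then a second call to fill the buffer.  A self-contained
** sketch of the UTF-8 to UTF-16 direction (using the raw API rather
** than SQLite's osMultiByteToWideChar indirection, and with error
** handling trimmed):
*/
#if 0  /* illustrative sketch only */
static LPWSTR utf8ToUnicodeSketch(const char *zUtf8){
  LPWSTR zWide = 0;
  int nChar = MultiByteToWideChar(CP_UTF8, 0, zUtf8, -1, NULL, 0);
  if( nChar>0 ) zWide = sqlite3MallocZero( (u64)nChar*sizeof(WCHAR) );
  if( zWide && MultiByteToWideChar(CP_UTF8, 0, zUtf8, -1, zWide, nChar)==0 ){
    sqlite3_free(zWide);     /* conversion failed after all */
    zWide = 0;
  }
  return zWide;
}
#endif
/*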
+*/ +static char *winConvertToUtf8Filename(const void *zFilename){ + char *zConverted = 0; + if( osIsNT() ){ + zConverted = winUnicodeToUtf8(zFilename); + } +#ifdef SQLITE_WIN32_HAS_ANSI + else{ + zConverted = winMbcsToUtf8(zFilename, osAreFileApisANSI()); + } +#endif + /* caller will handle out of memory */ + return zConverted; +} +#endif + +/* +** Convert a UTF-8 filename into whatever form the underlying +** operating system wants filenames in. Space to hold the result +** is obtained from malloc and must be freed by the calling +** function. +*/ +static void *winConvertFromUtf8Filename(const char *zFilename){ + void *zConverted = 0; + if( osIsNT() ){ + zConverted = winUtf8ToUnicode(zFilename); + } +#ifdef SQLITE_WIN32_HAS_ANSI + else{ + zConverted = winUtf8ToMbcs(zFilename, osAreFileApisANSI()); + } +#endif + /* caller will handle out of memory */ + return zConverted; +} + /* ** This function returns non-zero if the specified UTF-8 string buffer ** ends with a directory separator character or one was successfully @@ -52241,14 +51596,7 @@ static int winMakeEndInDirSep(int nBuf, char *zBuf){ if( winIsDirSep(zBuf[nLen-1]) ){ return 1; }else if( nLen+1mxPathname; - nBuf = 2 + (i64)nMax; + nMax = pVfs->mxPathname; nBuf = nMax + 2; zBuf = sqlite3MallocZero( nBuf ); if( !zBuf ){ OSTRACE(("TEMP-FILENAME rc=SQLITE_IOERR_NOMEM\n")); @@ -52326,7 +51673,7 @@ static int winGetTempname(sqlite3_vfs *pVfs, char **pzBuf){ } #if defined(__CYGWIN__) - else if( osGetenv!=NULL ){ + else{ static const char *azDirs[] = { 0, /* getenv("SQLITE_TMPDIR") */ 0, /* getenv("TMPDIR") */ @@ -52342,11 +51689,11 @@ static int winGetTempname(sqlite3_vfs *pVfs, char **pzBuf){ unsigned int i; const char *zDir = 0; - if( !azDirs[0] ) azDirs[0] = osGetenv("SQLITE_TMPDIR"); - if( !azDirs[1] ) azDirs[1] = osGetenv("TMPDIR"); - if( !azDirs[2] ) azDirs[2] = osGetenv("TMP"); - if( !azDirs[3] ) azDirs[3] = osGetenv("TEMP"); - if( !azDirs[4] ) azDirs[4] = osGetenv("USERPROFILE"); + if( !azDirs[0] ) azDirs[0] = getenv("SQLITE_TMPDIR"); + if( !azDirs[1] ) azDirs[1] = getenv("TMPDIR"); + if( !azDirs[2] ) azDirs[2] = getenv("TMP"); + if( !azDirs[3] ) azDirs[3] = getenv("TEMP"); + if( !azDirs[4] ) azDirs[4] = getenv("USERPROFILE"); for(i=0; inOut ){ - /* SQLite assumes that xFullPathname() nul-terminates the output buffer - ** even if it returns an error. */ - zOut[iOff] = '\0'; - return SQLITE_CANTOPEN_BKPT; - } - sqlite3_snprintf(nOut-iOff, &zOut[iOff], "%s", zPath); - return SQLITE_OK; -} -#endif /* __CYGWIN__ */ /* ** Turn a relative pathname into a full pathname. Write the full @@ -53178,8 +52476,8 @@ static int winFullPathnameNoMutex( int nFull, /* Size of output buffer in bytes */ char *zFull /* Output buffer */ ){ -#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT - int nByte; +#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT && !defined(__CYGWIN__) + DWORD nByte; void *zConverted; char *zOut; #endif @@ -53192,82 +52490,64 @@ static int winFullPathnameNoMutex( zRelative++; } +#if defined(__CYGWIN__) SimulateIOError( return SQLITE_ERROR ); - -#ifdef __CYGWIN__ - if( osGetcwd ){ - zFull[nFull-1] = '\0'; - if( !winIsDriveLetterAndColon(zRelative) || !winIsDirSep(zRelative[2]) ){ - int rc = SQLITE_OK; - int nLink = 1; /* Number of symbolic links followed so far */ - const char *zIn = zRelative; /* Input path for each iteration of loop */ - char *zDel = 0; - struct stat buf; - - UNUSED_PARAMETER(pVfs); - - do { - /* Call lstat() on path zIn. Set bLink to true if the path is a symbolic - ** link, or false otherwise. 
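**
** For the removed symlink-following code below, note how a relative
** link target is resolved against the directory of the link itself:
** following zIn=="/a/b/c" whose target reads "d/e" yields "/a/b/d/e",
** because the directory prefix of zIn ("/a/b/") is copied in front of
** the target before the next iteration.  The mode test
** (st_mode & 0170000)==0120000 is the octal spelling of S_ISLNK(), and
** at most SQLITE_MAX_SYMLINKS links are followed before giving up.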
*/ - int bLink = 0; - if( osLstat && osReadlink ) { - if( osLstat(zIn, &buf)!=0 ){ - int myErrno = osErrno; - if( myErrno!=ENOENT ){ - rc = winLogError(SQLITE_CANTOPEN_BKPT, (DWORD)myErrno, "lstat", zIn); - } - }else{ - bLink = ((buf.st_mode & 0170000) == 0120000); - } - - if( bLink ){ - if( zDel==0 ){ - zDel = sqlite3MallocZero(nFull); - if( zDel==0 ) rc = SQLITE_NOMEM; - }else if( ++nLink>SQLITE_MAX_SYMLINKS ){ - rc = SQLITE_CANTOPEN_BKPT; - } - - if( rc==SQLITE_OK ){ - nByte = osReadlink(zIn, zDel, nFull-1); - if( nByte ==(DWORD)-1 ){ - rc = winLogError(SQLITE_CANTOPEN_BKPT, (DWORD)osErrno, "readlink", zIn); - }else{ - if( zDel[0]!='/' ){ - int n; - for(n = sqlite3Strlen30(zIn); n>0 && zIn[n-1]!='/'; n--); - if( nByte+n+1>nFull ){ - rc = SQLITE_CANTOPEN_BKPT; - }else{ - memmove(&zDel[n], zDel, nByte+1); - memcpy(zDel, zIn, n); - nByte += n; - } - } - zDel[nByte] = '\0'; - } - } - - zIn = zDel; - } - } - - assert( rc!=SQLITE_OK || zIn!=zFull || zIn[0]=='/' ); - if( rc==SQLITE_OK && zIn!=zFull ){ - rc = mkFullPathname(zIn, zFull, nFull); - } - if( bLink==0 ) break; - zIn = zFull; - }while( rc==SQLITE_OK ); - - sqlite3_free(zDel); - winSimplifyName(zFull); - return rc; + UNUSED_PARAMETER(nFull); + assert( nFull>=pVfs->mxPathname ); + if ( sqlite3_data_directory && !winIsVerbatimPathname(zRelative) ){ + /* + ** NOTE: We are dealing with a relative path name and the data + ** directory has been set. Therefore, use it as the basis + ** for converting the relative path name to an absolute + ** one by prepending the data directory and a slash. + */ + char *zOut = sqlite3MallocZero( pVfs->mxPathname+1 ); + if( !zOut ){ + return SQLITE_IOERR_NOMEM_BKPT; + } + if( cygwin_conv_path( + (osIsNT() ? CCP_POSIX_TO_WIN_W : CCP_POSIX_TO_WIN_A) | + CCP_RELATIVE, zRelative, zOut, pVfs->mxPathname+1)<0 ){ + sqlite3_free(zOut); + return winLogError(SQLITE_CANTOPEN_CONVPATH, (DWORD)errno, + "winFullPathname1", zRelative); + }else{ + char *zUtf8 = winConvertToUtf8Filename(zOut); + if( !zUtf8 ){ + sqlite3_free(zOut); + return SQLITE_IOERR_NOMEM_BKPT; + } + sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s%c%s", + sqlite3_data_directory, winGetDirSep(), zUtf8); + sqlite3_free(zUtf8); + sqlite3_free(zOut); + } + }else{ + char *zOut = sqlite3MallocZero( pVfs->mxPathname+1 ); + if( !zOut ){ + return SQLITE_IOERR_NOMEM_BKPT; + } + if( cygwin_conv_path( + (osIsNT() ? CCP_POSIX_TO_WIN_W : CCP_POSIX_TO_WIN_A), + zRelative, zOut, pVfs->mxPathname+1)<0 ){ + sqlite3_free(zOut); + return winLogError(SQLITE_CANTOPEN_CONVPATH, (DWORD)errno, + "winFullPathname2", zRelative); + }else{ + char *zUtf8 = winConvertToUtf8Filename(zOut); + if( !zUtf8 ){ + sqlite3_free(zOut); + return SQLITE_IOERR_NOMEM_BKPT; + } + sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zUtf8); + sqlite3_free(zUtf8); + sqlite3_free(zOut); } } -#endif /* __CYGWIN__ */ + return SQLITE_OK; +#endif -#if (SQLITE_OS_WINCE || SQLITE_OS_WINRT) && defined(_WIN32) +#if (SQLITE_OS_WINCE || SQLITE_OS_WINRT) && !defined(__CYGWIN__) SimulateIOError( return SQLITE_ERROR ); /* WinCE has no concept of a relative pathname, or so I am told. */ /* WinRT has no way to convert a relative path to an absolute one. 
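**
** The Cygwin branch above relies on cygwin_conv_path(3), whose
** contract is: a negative return indicates failure with errno set
** (e.g. ENOSPC when the output buffer is too small), otherwise the
** converted path is written into the caller's buffer.  A minimal
** standalone sketch of the ANSI flavor (the code above selects the
** wide or ANSI conversion via osIsNT()):
*/
#if 0  /* illustrative sketch only */
#include <sys/cygwin.h>
#include <errno.h>
static int posixToWin32Sketch(const char *zPosix, char *zOut, size_t nOut){
  if( cygwin_conv_path(CCP_POSIX_TO_WIN_A, zPosix, zOut, nOut)<0 ){
    return errno;          /* conversion failed */
  }
  return 0;                /* zOut now holds the Win32 ANSI path */
}
#endif
/*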
*/ @@ -53286,8 +52566,7 @@ static int winFullPathnameNoMutex( return SQLITE_OK; #endif -#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT -#if defined(_WIN32) +#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT && !defined(__CYGWIN__) /* It's odd to simulate an io-error here, but really this is just ** using the io-error infrastructure to test that SQLite handles this ** function failing. This function could fail if, for example, the @@ -53305,7 +52584,6 @@ static int winFullPathnameNoMutex( sqlite3_data_directory, winGetDirSep(), zRelative); return SQLITE_OK; } -#endif zConverted = winConvertFromUtf8Filename(zRelative); if( zConverted==0 ){ return SQLITE_IOERR_NOMEM_BKPT; @@ -53344,12 +52622,13 @@ static int winFullPathnameNoMutex( return winLogError(SQLITE_CANTOPEN_FULLPATH, osGetLastError(), "winFullPathname3", zRelative); } - zTemp = sqlite3MallocZero( nByte*sizeof(zTemp[0]) + 3*sizeof(zTemp[0]) ); + nByte += 3; + zTemp = sqlite3MallocZero( nByte*sizeof(zTemp[0]) ); if( zTemp==0 ){ sqlite3_free(zConverted); return SQLITE_IOERR_NOMEM_BKPT; } - nByte = osGetFullPathNameA((char*)zConverted, nByte+3, zTemp, 0); + nByte = osGetFullPathNameA((char*)zConverted, nByte, zTemp, 0); if( nByte==0 ){ sqlite3_free(zConverted); sqlite3_free(zTemp); @@ -53362,26 +52641,7 @@ static int winFullPathnameNoMutex( } #endif if( zOut ){ -#ifdef __CYGWIN__ - if( memcmp(zOut, "\\\\?\\", 4) ){ - sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut); - }else if( memcmp(zOut+4, "UNC\\", 4) ){ - sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut+4); - }else{ - char *p = zOut+6; - *p = '\\'; - if( osGetcwd ){ - /* On Cygwin, UNC paths use forward slashes */ - while( *p ){ - if( *p=='\\' ) *p = '/'; - ++p; - } - } - sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut+6); - } -#else sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut); -#endif /* __CYGWIN__ */ sqlite3_free(zOut); return SQLITE_OK; }else{ @@ -53411,8 +52671,25 @@ static int winFullPathname( */ static void *winDlOpen(sqlite3_vfs *pVfs, const char *zFilename){ HANDLE h; +#if defined(__CYGWIN__) + int nFull = pVfs->mxPathname+1; + char *zFull = sqlite3MallocZero( nFull ); + void *zConverted = 0; + if( zFull==0 ){ + OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); + return 0; + } + if( winFullPathname(pVfs, zFilename, nFull, zFull)!=SQLITE_OK ){ + sqlite3_free(zFull); + OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); + return 0; + } + zConverted = winConvertFromUtf8Filename(zFull); + sqlite3_free(zFull); +#else void *zConverted = winConvertFromUtf8Filename(zFilename); UNUSED_PARAMETER(pVfs); +#endif if( zConverted==0 ){ OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); return 0; @@ -53761,7 +53038,7 @@ SQLITE_API int sqlite3_os_init(void){ /* Double-check that the aSyscall[] array has been constructed ** correctly. 
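**
** The GetFullPathName calls earlier in this hunk use the same two-call
** protocol as the character conversions: when the buffer is too small
** the return value is the required size in characters (including the
** terminator), so the code probes once, allocates, then calls again.
** Sketch of the ANSI flavor, with the same +3 slop as the code above:
*/
#if 0  /* illustrative sketch only */
static char *fullPathSketch(const char *zRel){
  char *z = 0;
  DWORD n = GetFullPathNameA(zRel, 0, 0, 0);   /* probe required size */
  if( n>0 ) z = sqlite3MallocZero( (u64)n+3 );
  if( z && GetFullPathNameA(zRel, n+3, z, 0)==0 ){
    sqlite3_free(z);       /* hard failure from the second call */
    z = 0;
  }
  return z;
}
#endif
/*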
See ticket [bb3a86e890c8e96ab] */ - assert( ArraySize(aSyscall)==89 ); + assert( ArraySize(aSyscall)==80 ); /* get memory map allocation granularity */ memset(&winSysInfo, 0, sizeof(SYSTEM_INFO)); @@ -54380,13 +53657,13 @@ static int memdbOpen( } if( p==0 ){ MemStore **apNew; - p = sqlite3Malloc( sizeof(*p) + (i64)szName + 3 ); + p = sqlite3Malloc( sizeof(*p) + szName + 3 ); if( p==0 ){ sqlite3_mutex_leave(pVfsMutex); return SQLITE_NOMEM; } apNew = sqlite3Realloc(memdb_g.apMemStore, - sizeof(apNew[0])*(1+(i64)memdb_g.nMemStore) ); + sizeof(apNew[0])*(memdb_g.nMemStore+1) ); if( apNew==0 ){ sqlite3_free(p); sqlite3_mutex_leave(pVfsMutex); @@ -54819,7 +54096,7 @@ SQLITE_PRIVATE int sqlite3MemdbInit(void){ ** no fewer collisions than the no-op *1. */ #define BITVEC_HASH(X) (((X)*1)%BITVEC_NINT) -#define BITVEC_NPTR ((u32)(BITVEC_USIZE/sizeof(Bitvec *))) +#define BITVEC_NPTR (BITVEC_USIZE/sizeof(Bitvec *)) /* @@ -54968,9 +54245,7 @@ bitvec_set_rehash: }else{ memcpy(aiValues, p->u.aHash, sizeof(p->u.aHash)); memset(p->u.apSub, 0, sizeof(p->u.apSub)); - p->iDivisor = p->iSize/BITVEC_NPTR; - if( (p->iSize%BITVEC_NPTR)!=0 ) p->iDivisor++; - if( p->iDivisoriDivisor = BITVEC_NBIT; + p->iDivisor = (p->iSize + BITVEC_NPTR - 1)/BITVEC_NPTR; rc = sqlite3BitvecSet(p, i); for(j=0; jiSize<=BITVEC_NBIT ){ - p->u.aBitmap[i/BITVEC_SZELEM] &= ~(BITVEC_TELEM)(1<<(i&(BITVEC_SZELEM-1))); + p->u.aBitmap[i/BITVEC_SZELEM] &= ~(1 << (i&(BITVEC_SZELEM-1))); }else{ unsigned int j; u32 *aiValues = pBuf; @@ -55055,7 +54330,7 @@ SQLITE_PRIVATE u32 sqlite3BitvecSize(Bitvec *p){ ** individual bits within V. */ #define SETBIT(V,I) V[I>>3] |= (1<<(I&7)) -#define CLEARBIT(V,I) V[I>>3] &= ~(BITVEC_TELEM)(1<<(I&7)) +#define CLEARBIT(V,I) V[I>>3] &= ~(1<<(I&7)) #define TESTBIT(V,I) (V[I>>3]&(1<<(I&7)))!=0 /* @@ -55098,7 +54373,7 @@ SQLITE_PRIVATE int sqlite3BitvecBuiltinTest(int sz, int *aOp){ /* Allocate the Bitvec to be tested and a linear array of ** bits to act as the reference */ pBitvec = sqlite3BitvecCreate( sz ); - pV = sqlite3MallocZero( (7+(i64)sz)/8 + 1 ); + pV = sqlite3MallocZero( (sz+7)/8 + 1 ); pTmpSpace = sqlite3_malloc64(BITVEC_SZ); if( pBitvec==0 || pV==0 || pTmpSpace==0 ) goto bitvec_end; @@ -56339,6 +55614,10 @@ static SQLITE_WSD struct PCacheGlobal { sqlite3_mutex *mutex; /* Mutex for accessing the following: */ PgFreeslot *pFree; /* Free page blocks */ int nFreeSlot; /* Number of unused pcache slots */ + /* The following value requires a mutex to change. We skip the mutex on + ** reading because (1) most platforms read a 32-bit integer atomically and + ** (2) even if an incorrect value is read, no great harm is done since this + ** is really just an optimization. */ int bUnderPressure; /* True if low on PAGECACHE memory */ } pcache1_g; @@ -56386,7 +55665,7 @@ SQLITE_PRIVATE void sqlite3PCacheBufferSetup(void *pBuf, int sz, int n){ pcache1.nReserve = n>90 ? 
10 : (n/10 + 1); pcache1.pStart = pBuf; pcache1.pFree = 0; - AtomicStore(&pcache1.bUnderPressure,0); + pcache1.bUnderPressure = 0; while( n-- ){ p = (PgFreeslot*)pBuf; p->pNext = pcache1.pFree; @@ -56454,7 +55733,7 @@ static void *pcache1Alloc(int nByte){ if( p ){ pcache1.pFree = pcache1.pFree->pNext; pcache1.nFreeSlot--; - AtomicStore(&pcache1.bUnderPressure,pcache1.nFreeSlot=0 ); sqlite3StatusHighwater(SQLITE_STATUS_PAGECACHE_SIZE, nByte); sqlite3StatusUp(SQLITE_STATUS_PAGECACHE_USED, 1); @@ -56493,7 +55772,7 @@ static void pcache1Free(void *p){ pSlot->pNext = pcache1.pFree; pcache1.pFree = pSlot; pcache1.nFreeSlot++; - AtomicStore(&pcache1.bUnderPressure,pcache1.nFreeSlotszPage+pCache->szExtra)<=pcache1.szSlot ){ - return AtomicLoad(&pcache1.bUnderPressure); + return pcache1.bUnderPressure; }else{ return sqlite3HeapNearlyFull(); } @@ -56641,12 +55920,12 @@ static int pcache1UnderMemoryPressure(PCache1 *pCache){ */ static void pcache1ResizeHash(PCache1 *p){ PgHdr1 **apNew; - u64 nNew; - u32 i; + unsigned int nNew; + unsigned int i; assert( sqlite3_mutex_held(p->pGroup->mutex) ); - nNew = 2*(u64)p->nHash; + nNew = p->nHash*2; if( nNew<256 ){ nNew = 256; } @@ -56869,7 +56148,7 @@ static void pcache1Destroy(sqlite3_pcache *p); static sqlite3_pcache *pcache1Create(int szPage, int szExtra, int bPurgeable){ PCache1 *pCache; /* The newly created page cache */ PGroup *pGroup; /* The group the new page cache will belong to */ - i64 sz; /* Bytes of memory required to allocate the new cache */ + int sz; /* Bytes of memory required to allocate the new cache */ assert( (szPage & (szPage-1))==0 && szPage>=512 && szPage<=65536 ); assert( szExtra < 300 ); @@ -58757,9 +58036,6 @@ struct Pager { Wal *pWal; /* Write-ahead log used by "journal_mode=wal" */ char *zWal; /* File name for write-ahead log */ #endif -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - sqlite3 *dbWal; -#endif }; /* @@ -59351,7 +58627,7 @@ static void checkPage(PgHdr *pPg){ ** If an error occurs while reading from the journal file, an SQLite ** error code is returned. */ -static int readSuperJournal(sqlite3_file *pJrnl, char *zSuper, u64 nSuper){ +static int readSuperJournal(sqlite3_file *pJrnl, char *zSuper, u32 nSuper){ int rc; /* Return code */ u32 len; /* Length in bytes of super-journal name */ i64 szJ; /* Total size in bytes of journal file pJrnl */ @@ -59906,15 +59182,6 @@ static void pager_unlock(Pager *pPager){ if( pagerUseWal(pPager) ){ assert( !isOpen(pPager->jfd) ); - if( pPager->eState==PAGER_ERROR ){ - /* If an IO error occurs in wal.c while attempting to wrap the wal file, - ** then the Wal object may be holding a write-lock but no read-lock. - ** This call ensures that the write-lock is dropped as well. We cannot - ** have sqlite3WalEndReadTransaction() drop the write-lock, as it once - ** did, because this would break "BEGIN EXCLUSIVE" handling for - ** SQLITE_ENABLE_SETLK_TIMEOUT builds. */ - sqlite3WalEndWriteTransaction(pPager->pWal); - } sqlite3WalEndReadTransaction(pPager->pWal); pPager->eState = PAGER_OPEN; }else if( !pPager->exclusiveMode ){ @@ -60596,12 +59863,12 @@ static int pager_delsuper(Pager *pPager, const char *zSuper){ char *zJournal; /* Pointer to one journal within MJ file */ char *zSuperPtr; /* Space to hold super-journal filename */ char *zFree = 0; /* Free this buffer */ - i64 nSuperPtr; /* Amount of space allocated to zSuperPtr[] */ + int nSuperPtr; /* Amount of space allocated to zSuperPtr[] */ /* Allocate space for both the pJournal and pSuper file descriptors. 
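**
** (A note on integer widths in this and several nearby hunks: sizing
** expressions such as pVfs->mxPathname+1 or nPathname*2 are evaluated
** in plain "int" here, where the replaced code first widened one
** operand to i64.  With ordinary values the two agree -- e.g.
** mxPathname==1024 gives nSuperPtr==1025 either way -- and the widened
** form only matters if a VFS reports a size near 0x7fffffff, where the
** 32-bit sum would wrap: (i64)0x7fffffff + 1 == 0x80000000, while the
** same addition in int overflows and is undefined behavior in C.)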
** If successful, open the super-journal file for reading. */ - pSuper = (sqlite3_file *)sqlite3MallocZero(2 * (i64)pVfs->szOsFile); + pSuper = (sqlite3_file *)sqlite3MallocZero(pVfs->szOsFile * 2); if( !pSuper ){ rc = SQLITE_NOMEM_BKPT; pJournal = 0; @@ -60619,14 +59886,11 @@ static int pager_delsuper(Pager *pPager, const char *zSuper){ */ rc = sqlite3OsFileSize(pSuper, &nSuperJournal); if( rc!=SQLITE_OK ) goto delsuper_out; - nSuperPtr = 1 + (i64)pVfs->mxPathname; - assert( nSuperJournal>=0 && nSuperPtr>0 ); + nSuperPtr = pVfs->mxPathname+1; zFree = sqlite3Malloc(4 + nSuperJournal + nSuperPtr + 2); if( !zFree ){ rc = SQLITE_NOMEM_BKPT; goto delsuper_out; - }else{ - assert( nSuperJournal<=0x7fffffff ); } zFree[0] = zFree[1] = zFree[2] = zFree[3] = 0; zSuperJournal = &zFree[4]; @@ -60887,7 +60151,7 @@ static int pager_playback(Pager *pPager, int isHot){ ** for pageSize. */ zSuper = pPager->pTmpSpace; - rc = readSuperJournal(pPager->jfd, zSuper, 1+(i64)pPager->pVfs->mxPathname); + rc = readSuperJournal(pPager->jfd, zSuper, pPager->pVfs->mxPathname+1); if( rc==SQLITE_OK && zSuper[0] ){ rc = sqlite3OsAccess(pVfs, zSuper, SQLITE_ACCESS_EXISTS, &res); } @@ -61026,7 +60290,7 @@ end_playback: ** which case it requires 4 0x00 bytes in memory immediately before ** the filename. */ zSuper = &pPager->pTmpSpace[4]; - rc = readSuperJournal(pPager->jfd, zSuper, 1+(i64)pPager->pVfs->mxPathname); + rc = readSuperJournal(pPager->jfd, zSuper, pPager->pVfs->mxPathname+1); testcase( rc!=SQLITE_OK ); } if( rc==SQLITE_OK @@ -62797,7 +62061,6 @@ SQLITE_PRIVATE int sqlite3PagerOpen( const char *zUri = 0; /* URI args to copy */ int nUriByte = 1; /* Number of bytes of URI args at *zUri */ - /* Figure out how much space is required for each journal file-handle ** (there are two of them, the main journal and the sub-journal). */ journalFileSize = ROUND8(sqlite3JournalSize(pVfs)); @@ -62823,8 +62086,8 @@ SQLITE_PRIVATE int sqlite3PagerOpen( */ if( zFilename && zFilename[0] ){ const char *z; - nPathname = pVfs->mxPathname + 1; - zPathname = sqlite3DbMallocRaw(0, 2*(i64)nPathname); + nPathname = pVfs->mxPathname+1; + zPathname = sqlite3DbMallocRaw(0, nPathname*2); if( zPathname==0 ){ return SQLITE_NOMEM_BKPT; } @@ -62911,14 +62174,14 @@ SQLITE_PRIVATE int sqlite3PagerOpen( ROUND8(sizeof(*pPager)) + /* Pager structure */ ROUND8(pcacheSize) + /* PCache object */ ROUND8(pVfs->szOsFile) + /* The main db file */ - (u64)journalFileSize * 2 + /* The two journal files */ + journalFileSize * 2 + /* The two journal files */ SQLITE_PTRSIZE + /* Space to hold a pointer */ 4 + /* Database prefix */ - (u64)nPathname + 1 + /* database filename */ - (u64)nUriByte + /* query parameters */ - (u64)nPathname + 8 + 1 + /* Journal filename */ + nPathname + 1 + /* database filename */ + nUriByte + /* query parameters */ + nPathname + 8 + 1 + /* Journal filename */ #ifndef SQLITE_OMIT_WAL - (u64)nPathname + 4 + 1 + /* WAL filename */ + nPathname + 4 + 1 + /* WAL filename */ #endif 3 /* Terminator */ ); @@ -65641,11 +64904,6 @@ static int pagerOpenWal(Pager *pPager){ pPager->fd, pPager->zWal, pPager->exclusiveMode, pPager->journalSizeLimit, &pPager->pWal ); -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - if( rc==SQLITE_OK ){ - sqlite3WalDb(pPager->pWal, pPager->dbWal); - } -#endif } pagerFixMaplimit(pPager); @@ -65765,7 +65023,6 @@ SQLITE_PRIVATE int sqlite3PagerWalWriteLock(Pager *pPager, int bLock){ ** blocking locks are required. 
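**
** The Pager allocation earlier in this hunk is SQLite's usual
** "one malloc, many pointers" layout: total up the ROUND8()-aligned
** sizes of every sub-object, allocate once, then hand out successive
** slices of the buffer.  The pattern, reduced to two sub-objects:
*/
#if 0  /* illustrative sketch only */
static void *carveSketch(int szA, int szB, void **ppA, void **ppB){
  u8 *pBuf = sqlite3MallocZero( ROUND8(szA) + ROUND8(szB) );
  if( pBuf ){
    *ppA = (void*)pBuf;                   /* first sub-object */
    *ppB = (void*)&pBuf[ROUND8(szA)];     /* second, still 8-byte aligned */
  }
  return pBuf;     /* one sqlite3_free() releases everything at once */
}
#endif
/*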
*/ SQLITE_PRIVATE void sqlite3PagerWalDb(Pager *pPager, sqlite3 *db){ - pPager->dbWal = db; if( pagerUseWal(pPager) ){ sqlite3WalDb(pPager->pWal, db); } @@ -66379,11 +65636,6 @@ struct WalCkptInfo { /* ** An open write-ahead log file is represented by an instance of the ** following object. -** -** writeLock: -** This is usually set to 1 whenever the WRITER lock is held. However, -** if it is set to 2, then the WRITER lock is held but must be released -** by walHandleException() if a SEH exception is thrown. */ struct Wal { sqlite3_vfs *pVfs; /* The VFS used to create pDbFd */ @@ -66474,13 +65726,9 @@ struct WalIterator { u32 *aPgno; /* Array of page numbers. */ int nEntry; /* Nr. of entries in aPgno[] and aIndex[] */ int iZero; /* Frame number associated with aPgno[0] */ - } aSegment[FLEXARRAY]; /* One for every 32KB page in the wal-index */ + } aSegment[1]; /* One for every 32KB page in the wal-index */ }; -/* Size (in bytes) of a WalIterator object suitable for N or fewer segments */ -#define SZ_WALITERATOR(N) \ - (offsetof(WalIterator,aSegment)*(N)*sizeof(struct WalSegment)) - /* ** Define the parameters of the hash tables in the wal-index file. There ** is a hash-table following every HASHTABLE_NPAGE page numbers in the @@ -66639,7 +65887,7 @@ static SQLITE_NOINLINE int walIndexPageRealloc( /* Enlarge the pWal->apWiData[] array if required */ if( pWal->nWiData<=iPage ){ - sqlite3_int64 nByte = sizeof(u32*)*(1+(i64)iPage); + sqlite3_int64 nByte = sizeof(u32*)*(iPage+1); volatile u32 **apNew; apNew = (volatile u32 **)sqlite3Realloc((void *)pWal->apWiData, nByte); if( !apNew ){ @@ -66748,8 +65996,10 @@ static void walChecksumBytes( s1 = s2 = 0; } - /* nByte is a multiple of 8 between 8 and 65536 */ - assert( nByte>=8 && (nByte&7)==0 && nByte<=65536 ); + assert( nByte>=8 ); + assert( (nByte&0x00000007)==0 ); + assert( nByte<=65536 ); + assert( nByte%4==0 ); if( !nativeCksum ){ do { @@ -67839,7 +67089,8 @@ static int walIteratorInit(Wal *pWal, u32 nBackfill, WalIterator **pp){ /* Allocate space for the WalIterator object. */ nSegment = walFramePage(iLast) + 1; - nByte = SZ_WALITERATOR(nSegment) + nByte = sizeof(WalIterator) + + (nSegment-1)*sizeof(struct WalSegment) + iLast*sizeof(ht_slot); p = (WalIterator *)sqlite3_malloc64(nByte + sizeof(ht_slot) * (iLast>HASHTABLE_NPAGE?HASHTABLE_NPAGE:iLast) @@ -67910,7 +67161,7 @@ static int walEnableBlockingMs(Wal *pWal, int nMs){ static int walEnableBlocking(Wal *pWal){ int res = 0; if( pWal->db ){ - int tmout = pWal->db->setlkTimeout; + int tmout = pWal->db->busyTimeout; if( tmout ){ res = walEnableBlockingMs(pWal, tmout); } @@ -68296,9 +67547,7 @@ static int walHandleException(Wal *pWal){ static const int S = 1; static const int E = (1<writeLock==2 ) pWal->writeLock = 0; - mUnlock = pWal->lockMask & ~( + u32 mUnlock = pWal->lockMask & ~( (pWal->readLock<0 ? 0 : (S << WAL_READ_LOCK(pWal->readLock))) | (pWal->writeLock ? (E << WAL_WRITE_LOCK) : 0) | (pWal->ckptLock ? (E << WAL_CKPT_LOCK) : 0) @@ -68570,12 +67819,7 @@ static int walIndexReadHdr(Wal *pWal, int *pChanged){ if( bWriteLock || SQLITE_OK==(rc = walLockExclusive(pWal, WAL_WRITE_LOCK, 1)) ){ - /* If the write-lock was just obtained, set writeLock to 2 instead of - ** the usual 1. This causes walIndexPage() to behave as if the - ** write-lock were held (so that it allocates new pages as required), - ** and walHandleException() to unlock the write-lock if a SEH exception - ** is thrown. 
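**
** (On the WalIterator hunk above: the change swaps a flexible array
** member for the classic "aSegment[1]" struct hack, and the sizing
** formula moves with it.  A flexible-array object is sized as
** offsetof(WalIterator,aSegment) + N*sizeof(struct WalSegment), which
** is what the removed SZ_WALITERATOR(N) macro computed; the struct
** hack uses sizeof(WalIterator) + (N-1)*sizeof(struct WalSegment),
** because sizeof() already accounts for the one declared element.  The
** same struct-hack sizing reappears below in the sqlite3_context
** allocation: sizeof(*pCtx) + (nArg-1)*sizeof(sqlite3_value*).)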
*/ - if( !bWriteLock ) pWal->writeLock = 2; + pWal->writeLock = 1; if( SQLITE_OK==(rc = walIndexPage(pWal, 0, &page0)) ){ badHdr = walIndexTryHdr(pWal, pChanged); if( badHdr ){ @@ -68939,6 +68183,7 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ rc = walIndexReadHdr(pWal, pChanged); } #ifdef SQLITE_ENABLE_SETLK_TIMEOUT + walDisableBlocking(pWal); if( rc==SQLITE_BUSY_TIMEOUT ){ rc = SQLITE_BUSY; *pCnt |= WAL_RETRY_BLOCKED_MASK; @@ -68953,7 +68198,6 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ ** WAL_RETRY this routine will be called again and will probably be ** right on the second iteration. */ - (void)walEnableBlocking(pWal); if( pWal->apWiData[0]==0 ){ /* This branch is taken when the xShmMap() method returns SQLITE_BUSY. ** We assume this is a transient condition, so return WAL_RETRY. The @@ -68970,7 +68214,6 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ rc = SQLITE_BUSY_RECOVERY; } } - walDisableBlocking(pWal); if( rc!=SQLITE_OK ){ return rc; } @@ -69361,11 +68604,8 @@ SQLITE_PRIVATE int sqlite3WalBeginReadTransaction(Wal *pWal, int *pChanged){ ** read-lock. */ SQLITE_PRIVATE void sqlite3WalEndReadTransaction(Wal *pWal){ -#ifndef SQLITE_ENABLE_SETLK_TIMEOUT - assert( pWal->writeLock==0 || pWal->readLock<0 ); -#endif + sqlite3WalEndWriteTransaction(pWal); if( pWal->readLock>=0 ){ - sqlite3WalEndWriteTransaction(pWal); walUnlockShared(pWal, WAL_READ_LOCK(pWal->readLock)); pWal->readLock = -1; } @@ -69558,7 +68798,7 @@ SQLITE_PRIVATE int sqlite3WalBeginWriteTransaction(Wal *pWal){ ** read-transaction was even opened, making this call a no-op. ** Return early. */ if( pWal->writeLock ){ - assert( !memcmp(&pWal->hdr,(void*)pWal->apWiData[0],sizeof(WalIndexHdr)) ); + assert( !memcmp(&pWal->hdr,(void *)walIndexHdr(pWal),sizeof(WalIndexHdr)) ); return SQLITE_OK; } #endif @@ -69658,7 +68898,6 @@ SQLITE_PRIVATE int sqlite3WalUndo(Wal *pWal, int (*xUndo)(void *, Pgno), void *p if( iMax!=pWal->hdr.mxFrame ) walCleanupHash(pWal); } SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; ) - pWal->iReCksum = 0; } return rc; } @@ -69706,9 +68945,6 @@ SQLITE_PRIVATE int sqlite3WalSavepointUndo(Wal *pWal, u32 *aWalData){ walCleanupHash(pWal); } SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; ) - if( pWal->iReCksum>pWal->hdr.mxFrame ){ - pWal->iReCksum = 0; - } } return rc; @@ -71011,12 +70247,6 @@ struct CellInfo { */ #define BTCURSOR_MAX_DEPTH 20 -/* -** Maximum amount of storage local to a database page, regardless of -** page size. -*/ -#define BT_MAX_LOCAL 65501 /* 65536 - 35 */ - /* ** A cursor is a pointer to a particular entry within a particular ** b-tree within a database file. @@ -71425,7 +70655,7 @@ SQLITE_PRIVATE int sqlite3BtreeHoldsMutex(Btree *p){ */ static void SQLITE_NOINLINE btreeEnterAll(sqlite3 *db){ int i; - u8 skipOk = 1; + int skipOk = 1; Btree *p; assert( sqlite3_mutex_held(db->mutex) ); for(i=0; inDb; i++){ @@ -72281,7 +71511,7 @@ static int saveCursorKey(BtCursor *pCur){ ** below. 
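**
** (Stepping back to the WAL hunks above: as rewritten,
** sqlite3WalEndReadTransaction() calls sqlite3WalEndWriteTransaction()
** unconditionally before dropping the read-lock, so ending a read
** transaction always releases any WRITER lock first and a connection
** never keeps the write-lock past the end of its read transaction.
** The ordering is: release WAL_WRITE_LOCK, then walUnlockShared() on
** WAL_READ_LOCK(readLock), then set readLock to -1.)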
*/ void *pKey; pCur->nKey = sqlite3BtreePayloadSize(pCur); - pKey = sqlite3Malloc( ((i64)pCur->nKey) + 9 + 8 ); + pKey = sqlite3Malloc( pCur->nKey + 9 + 8 ); if( pKey ){ rc = sqlite3BtreePayload(pCur, 0, (int)pCur->nKey, pKey); if( rc==SQLITE_OK ){ @@ -72571,7 +71801,7 @@ SQLITE_PRIVATE void sqlite3BtreeCursorHint(BtCursor *pCur, int eHintType, ...){ */ SQLITE_PRIVATE void sqlite3BtreeCursorHintFlags(BtCursor *pCur, unsigned x){ assert( x==BTREE_SEEK_EQ || x==BTREE_BULKLOAD || x==0 ); - pCur->hints = (u8)x; + pCur->hints = x; } @@ -72765,15 +71995,14 @@ static SQLITE_NOINLINE void btreeParseCellAdjustSizeForOverflow( static int btreePayloadToLocal(MemPage *pPage, i64 nPayload){ int maxLocal; /* Maximum amount of payload held locally */ maxLocal = pPage->maxLocal; - assert( nPayload>=0 ); if( nPayload<=maxLocal ){ - return (int)nPayload; + return nPayload; }else{ int minLocal; /* Minimum amount of payload held locally */ int surplus; /* Overflow payload available for local storage */ minLocal = pPage->minLocal; - surplus = (int)(minLocal +(nPayload - minLocal)%(pPage->pBt->usableSize-4)); - return (surplus <= maxLocal) ? surplus : minLocal; + surplus = minLocal + (nPayload - minLocal)%(pPage->pBt->usableSize-4); + return ( surplus <= maxLocal ) ? surplus : minLocal; } } @@ -72883,13 +72112,11 @@ static void btreeParseCellPtr( pInfo->pPayload = pIter; testcase( nPayload==pPage->maxLocal ); testcase( nPayload==(u32)pPage->maxLocal+1 ); - assert( nPayload>=0 ); - assert( pPage->maxLocal <= BT_MAX_LOCAL ); if( nPayload<=pPage->maxLocal ){ /* This is the (easy) common case where the entire payload fits ** on the local page. No overflow is required. */ - pInfo->nSize = (u16)nPayload + (u16)(pIter - pCell); + pInfo->nSize = nPayload + (u16)(pIter - pCell); if( pInfo->nSize<4 ) pInfo->nSize = 4; pInfo->nLocal = (u16)nPayload; }else{ @@ -72922,13 +72149,11 @@ static void btreeParseCellPtrIndex( pInfo->pPayload = pIter; testcase( nPayload==pPage->maxLocal ); testcase( nPayload==(u32)pPage->maxLocal+1 ); - assert( nPayload>=0 ); - assert( pPage->maxLocal <= BT_MAX_LOCAL ); if( nPayload<=pPage->maxLocal ){ /* This is the (easy) common case where the entire payload fits ** on the local page. No overflow is required. */ - pInfo->nSize = (u16)nPayload + (u16)(pIter - pCell); + pInfo->nSize = nPayload + (u16)(pIter - pCell); if( pInfo->nSize<4 ) pInfo->nSize = 4; pInfo->nLocal = (u16)nPayload; }else{ @@ -73467,14 +72692,14 @@ static SQLITE_INLINE int allocateSpace(MemPage *pPage, int nByte, int *pIdx){ ** at the end of the page. So do additional corruption checks inside this ** routine and return SQLITE_CORRUPT if any problems are found. */ -static int freeSpace(MemPage *pPage, int iStart, int iSize){ - int iPtr; /* Address of ptr to next freeblock */ - int iFreeBlk; /* Address of the next freeblock */ +static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ + u16 iPtr; /* Address of ptr to next freeblock */ + u16 iFreeBlk; /* Address of the next freeblock */ u8 hdr; /* Page header size. 
0 or 100 */ - int nFrag = 0; /* Reduction in fragmentation */ - int iOrigSize = iSize; /* Original value of iSize */ - int x; /* Offset to cell content area */ - int iEnd = iStart + iSize; /* First byte past the iStart buffer */ + u8 nFrag = 0; /* Reduction in fragmentation */ + u16 iOrigSize = iSize; /* Original value of iSize */ + u16 x; /* Offset to cell content area */ + u32 iEnd = iStart + iSize; /* First byte past the iStart buffer */ unsigned char *data = pPage->aData; /* Page content */ u8 *pTmp; /* Temporary ptr into data[] */ @@ -73501,7 +72726,7 @@ static int freeSpace(MemPage *pPage, int iStart, int iSize){ } iPtr = iFreeBlk; } - if( iFreeBlk>(int)pPage->pBt->usableSize-4 ){ /* TH3: corrupt081.100 */ + if( iFreeBlk>pPage->pBt->usableSize-4 ){ /* TH3: corrupt081.100 */ return SQLITE_CORRUPT_PAGE(pPage); } assert( iFreeBlk>iPtr || iFreeBlk==0 || CORRUPT_DB ); @@ -73516,7 +72741,7 @@ static int freeSpace(MemPage *pPage, int iStart, int iSize){ nFrag = iFreeBlk - iEnd; if( iEnd>iFreeBlk ) return SQLITE_CORRUPT_PAGE(pPage); iEnd = iFreeBlk + get2byte(&data[iFreeBlk+2]); - if( iEnd > (int)pPage->pBt->usableSize ){ + if( iEnd > pPage->pBt->usableSize ){ return SQLITE_CORRUPT_PAGE(pPage); } iSize = iEnd - iStart; @@ -73537,7 +72762,7 @@ static int freeSpace(MemPage *pPage, int iStart, int iSize){ } } if( nFrag>data[hdr+7] ) return SQLITE_CORRUPT_PAGE(pPage); - data[hdr+7] -= (u8)nFrag; + data[hdr+7] -= nFrag; } pTmp = &data[hdr+5]; x = get2byte(pTmp); @@ -73558,8 +72783,7 @@ static int freeSpace(MemPage *pPage, int iStart, int iSize){ /* Insert the new freeblock into the freelist */ put2byte(&data[iPtr], iStart); put2byte(&data[iStart], iFreeBlk); - assert( iSize>=0 && iSize<=0xffff ); - put2byte(&data[iStart+2], (u16)iSize); + put2byte(&data[iStart+2], iSize); } pPage->nFree += iOrigSize; return SQLITE_OK; @@ -73785,7 +73009,7 @@ static int btreeInitPage(MemPage *pPage){ assert( pBt->pageSize>=512 && pBt->pageSize<=65536 ); pPage->maskPage = (u16)(pBt->pageSize - 1); pPage->nOverflow = 0; - pPage->cellOffset = (u16)(pPage->hdrOffset + 8 + pPage->childPtrSize); + pPage->cellOffset = pPage->hdrOffset + 8 + pPage->childPtrSize; pPage->aCellIdx = data + pPage->childPtrSize + 8; pPage->aDataEnd = pPage->aData + pBt->pageSize; pPage->aDataOfst = pPage->aData + pPage->childPtrSize; @@ -73819,8 +73043,8 @@ static int btreeInitPage(MemPage *pPage){ static void zeroPage(MemPage *pPage, int flags){ unsigned char *data = pPage->aData; BtShared *pBt = pPage->pBt; - int hdr = pPage->hdrOffset; - int first; + u8 hdr = pPage->hdrOffset; + u16 first; assert( sqlite3PagerPagenumber(pPage->pDbPage)==pPage->pgno || CORRUPT_DB ); assert( sqlite3PagerGetExtra(pPage->pDbPage) == (void*)pPage ); @@ -73837,7 +73061,7 @@ static void zeroPage(MemPage *pPage, int flags){ put2byte(&data[hdr+5], pBt->usableSize); pPage->nFree = (u16)(pBt->usableSize - first); decodeFlags(pPage, flags); - pPage->cellOffset = (u16)first; + pPage->cellOffset = first; pPage->aDataEnd = &data[pBt->pageSize]; pPage->aCellIdx = &data[first]; pPage->aDataOfst = &data[pPage->childPtrSize]; @@ -74623,7 +73847,7 @@ SQLITE_PRIVATE int sqlite3BtreeSetPageSize(Btree *p, int pageSize, int nReserve, BtShared *pBt = p->pBt; assert( nReserve>=0 && nReserve<=255 ); sqlite3BtreeEnter(p); - pBt->nReserveWanted = (u8)nReserve; + pBt->nReserveWanted = nReserve; x = pBt->pageSize - pBt->usableSize; if( nReservebtsFlags & BTS_PAGESIZE_FIXED ){ @@ -74729,7 +73953,7 @@ SQLITE_PRIVATE int sqlite3BtreeSecureDelete(Btree *p, int newFlag){ assert( 
BTS_FAST_SECURE==(BTS_OVERWRITE|BTS_SECURE_DELETE) ); if( newFlag>=0 ){ p->pBt->btsFlags &= ~BTS_FAST_SECURE; - p->pBt->btsFlags |= (u16)(BTS_SECURE_DELETE*newFlag); + p->pBt->btsFlags |= BTS_SECURE_DELETE*newFlag; } b = (p->pBt->btsFlags & BTS_FAST_SECURE)/BTS_SECURE_DELETE; sqlite3BtreeLeave(p); @@ -75249,13 +74473,6 @@ static SQLITE_NOINLINE int btreeBeginTrans( (void)sqlite3PagerWalWriteLock(pPager, 0); unlockBtreeIfUnused(pBt); } -#if defined(SQLITE_ENABLE_SETLK_TIMEOUT) - if( rc==SQLITE_BUSY_TIMEOUT ){ - /* If a blocking lock timed out, break out of the loop here so that - ** the busy-handler is not invoked. */ - break; - } -#endif }while( (rc&0xFF)==SQLITE_BUSY && pBt->inTransaction==TRANS_NONE && btreeInvokeBusyHandler(pBt) ); sqlite3PagerWalDb(pPager, 0); @@ -77665,7 +76882,7 @@ bypass_moveto_root: rc = SQLITE_CORRUPT_PAGE(pPage); goto moveto_index_finish; } - pCellKey = sqlite3Malloc( (u64)nCell+(u64)nOverrun ); + pCellKey = sqlite3Malloc( nCell+nOverrun ); if( pCellKey==0 ){ rc = SQLITE_NOMEM_BKPT; goto moveto_index_finish; @@ -79184,8 +78401,7 @@ static int rebuildPage( } /* The pPg->nFree field is now set incorrectly. The caller will fix it. */ - assert( nCell < 10922 ); - pPg->nCell = (u16)nCell; + pPg->nCell = nCell; pPg->nOverflow = 0; put2byte(&aData[hdr+1], 0); @@ -79432,13 +78648,9 @@ static int editPage( if( pageInsertArray( pPg, pBegin, &pData, pCellptr, iNew+nCell, nNew-nCell, pCArray - ) - ){ - goto editpage_fail; - } + ) ) goto editpage_fail; - assert( nNew < 10922 ); - pPg->nCell = (u16)nNew; + pPg->nCell = nNew; pPg->nOverflow = 0; put2byte(&aData[hdr+3], pPg->nCell); @@ -79747,7 +78959,7 @@ static int balance_nonroot( int pageFlags; /* Value of pPage->aData[0] */ int iSpace1 = 0; /* First unused byte of aSpace1[] */ int iOvflSpace = 0; /* First unused byte of aOvflSpace[] */ - u64 szScratch; /* Size of scratch memory requested */ + int szScratch; /* Size of scratch memory requested */ MemPage *apOld[NB]; /* pPage and up to two siblings */ MemPage *apNew[NB+2]; /* pPage and up to NB siblings after balancing */ u8 *pRight; /* Location in parent of right-sibling pointer */ @@ -81032,7 +80244,7 @@ SQLITE_PRIVATE int sqlite3BtreeInsert( if( pCur->info.nKey==pX->nKey ){ BtreePayload x2; x2.pData = pX->pKey; - x2.nData = (int)pX->nKey; assert( pX->nKey<=0x7fffffff ); + x2.nData = pX->nKey; x2.nZero = 0; return btreeOverwriteCell(pCur, &x2); } @@ -81213,7 +80425,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 getCellInfo(pSrc); if( pSrc->info.nPayload<0x80 ){ - *(aOut++) = (u8)pSrc->info.nPayload; + *(aOut++) = pSrc->info.nPayload; }else{ aOut += sqlite3PutVarint(aOut, pSrc->info.nPayload); } @@ -81226,7 +80438,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 nRem = pSrc->info.nPayload; if( nIn==nRem && nInpPage->maxLocal ){ memcpy(aOut, aIn, nIn); - pBt->nPreformatSize = nIn + (int)(aOut - pBt->pTmpSpace); + pBt->nPreformatSize = nIn + (aOut - pBt->pTmpSpace); return SQLITE_OK; }else{ int rc = SQLITE_OK; @@ -81238,7 +80450,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 u32 nOut; /* Size of output buffer aOut[] */ nOut = btreePayloadToLocal(pDest->pPage, pSrc->info.nPayload); - pBt->nPreformatSize = (int)nOut + (int)(aOut - pBt->pTmpSpace); + pBt->nPreformatSize = nOut + (aOut - pBt->pTmpSpace); if( nOutinfo.nPayload ){ pPgnoOut = &aOut[nOut]; pBt->nPreformatSize += 4; @@ -82859,7 +82071,6 @@ SQLITE_PRIVATE int sqlite3BtreeIsInBackup(Btree *p){ */ SQLITE_PRIVATE 
void *sqlite3BtreeSchema(Btree *p, int nBytes, void(*xFree)(void *)){ BtShared *pBt = p->pBt; - assert( nBytes==0 || nBytes==sizeof(Schema) ); sqlite3BtreeEnter(p); if( !pBt->pSchema && nBytes ){ pBt->pSchema = sqlite3DbMallocZero(0, nBytes); @@ -83976,7 +83187,7 @@ static void vdbeMemRenderNum(int sz, char *zBuf, Mem *p){ ** corresponding string value, then it is important that the string be ** derived from the numeric value, not the other way around, to ensure ** that the index and table are consistent. See ticket -** https://sqlite.org/src/info/343634942dd54ab (2018-01-31) for +** https://www.sqlite.org/src/info/343634942dd54ab (2018-01-31) for ** an example. ** ** This routine looks at pMem to verify that if it has both a numeric @@ -84162,7 +83373,7 @@ SQLITE_PRIVATE void sqlite3VdbeMemZeroTerminateIfAble(Mem *pMem){ return; } if( pMem->enc!=SQLITE_UTF8 ) return; - assert( pMem->z!=0 ); + if( NEVER(pMem->z==0) ) return; if( pMem->flags & MEM_Dyn ){ if( pMem->xDel==sqlite3_free && sqlite3_msize(pMem->z) >= (u64)(pMem->n+1) @@ -85275,7 +84486,7 @@ static sqlite3_value *valueNew(sqlite3 *db, struct ValueNewStat4Ctx *p){ if( pRec==0 ){ Index *pIdx = p->pIdx; /* Index being probed */ - i64 nByte; /* Bytes of space to allocate */ + int nByte; /* Bytes of space to allocate */ int i; /* Counter variable */ int nCol = pIdx->nColumn; /* Number of index columns including rowid */ @@ -85341,7 +84552,7 @@ static int valueFromFunction( ){ sqlite3_context ctx; /* Context object for function invocation */ sqlite3_value **apVal = 0; /* Function arguments */ - int nVal = 0; /* Number of function arguments */ + int nVal = 0; /* Size of apVal[] array */ FuncDef *pFunc = 0; /* Function definition */ sqlite3_value *pVal = 0; /* New value */ int rc = SQLITE_OK; /* Return code */ @@ -86339,10 +85550,12 @@ SQLITE_PRIVATE int sqlite3VdbeAddFunctionCall( int eCallCtx /* Calling context */ ){ Vdbe *v = pParse->pVdbe; + int nByte; int addr; sqlite3_context *pCtx; assert( v ); - pCtx = sqlite3DbMallocRawNN(pParse->db, SZ_CONTEXT(nArg)); + nByte = sizeof(*pCtx) + (nArg-1)*sizeof(sqlite3_value*); + pCtx = sqlite3DbMallocRawNN(pParse->db, nByte); if( pCtx==0 ){ assert( pParse->db->mallocFailed ); freeEphemeralFunction(pParse->db, (FuncDef*)pFunc); @@ -86618,7 +85831,7 @@ static Op *opIterNext(VdbeOpIter *p){ } if( pRet->p4type==P4_SUBPROGRAM ){ - i64 nByte = (1+(u64)p->nSub)*sizeof(SubProgram*); + int nByte = (p->nSub+1)*sizeof(SubProgram*); int j; for(j=0; jnSub; j++){ if( p->apSub[j]==pRet->p4.pProgram ) break; @@ -86748,8 +85961,8 @@ SQLITE_PRIVATE void sqlite3VdbeAssertAbortable(Vdbe *p){ ** (1) For each jump instruction with a negative P2 value (a label) ** resolve the P2 value to an actual address. ** -** (2) Compute the maximum number of arguments used by the xUpdate/xFilter -** methods of any virtual table and store that value in *pMaxVtabArgs. +** (2) Compute the maximum number of arguments used by any SQL function +** and store that value in *pMaxFuncArgs. ** ** (3) Update the Vdbe.readOnly and Vdbe.bIsReader flags to accurately ** indicate what the prepared statement actually does. @@ -86762,8 +85975,8 @@ SQLITE_PRIVATE void sqlite3VdbeAssertAbortable(Vdbe *p){ ** script numbers the opcodes correctly. Changes to this routine must be ** coordinated with changes to mkopcodeh.tcl. 
*/ -static void resolveP2Values(Vdbe *p, int *pMaxVtabArgs){ - int nMaxVtabArgs = *pMaxVtabArgs; +static void resolveP2Values(Vdbe *p, int *pMaxFuncArgs){ + int nMaxArgs = *pMaxFuncArgs; Op *pOp; Parse *pParse = p->pParse; int *aLabel = pParse->aLabel; @@ -86808,19 +86021,15 @@ static void resolveP2Values(Vdbe *p, int *pMaxVtabArgs){ } #ifndef SQLITE_OMIT_VIRTUALTABLE case OP_VUpdate: { - if( pOp->p2>nMaxVtabArgs ) nMaxVtabArgs = pOp->p2; + if( pOp->p2>nMaxArgs ) nMaxArgs = pOp->p2; break; } case OP_VFilter: { int n; - /* The instruction immediately prior to VFilter will be an - ** OP_Integer that sets the "argc" value for the VFilter. See - ** the code where OP_VFilter is generated at tag-20250207a. */ assert( (pOp - p->aOp) >= 3 ); assert( pOp[-1].opcode==OP_Integer ); - assert( pOp[-1].p2==pOp->p3+1 ); n = pOp[-1].p1; - if( n>nMaxVtabArgs ) nMaxVtabArgs = n; + if( n>nMaxArgs ) nMaxArgs = n; /* Fall through into the default case */ /* no break */ deliberate_fall_through } @@ -86861,7 +86070,7 @@ resolve_p2_values_loop_exit: pParse->aLabel = 0; } pParse->nLabel = 0; - *pMaxVtabArgs = nMaxVtabArgs; + *pMaxFuncArgs = nMaxArgs; assert( p->bIsReader!=0 || DbMaskAllZero(p->btreeMask) ); } @@ -87090,7 +86299,7 @@ SQLITE_PRIVATE void sqlite3VdbeScanStatus( const char *zName /* Name of table or index being scanned */ ){ if( IS_STMT_SCANSTATUS(p->db) ){ - i64 nByte = (1+(i64)p->nScan) * sizeof(ScanStatus); + sqlite3_int64 nByte = (p->nScan+1) * sizeof(ScanStatus); ScanStatus *aNew; aNew = (ScanStatus*)sqlite3DbRealloc(p->db, p->aScan, nByte); if( aNew ){ @@ -87200,9 +86409,6 @@ SQLITE_PRIVATE void sqlite3VdbeChangeP5(Vdbe *p, u16 p5){ */ SQLITE_PRIVATE void sqlite3VdbeTypeofColumn(Vdbe *p, int iDest){ VdbeOp *pOp = sqlite3VdbeGetLastOp(p); -#ifdef SQLITE_DEBUG - while( pOp->opcode==OP_ReleaseReg ) pOp--; -#endif if( pOp->p3==iDest && pOp->opcode==OP_Column ){ pOp->p5 |= OPFLAG_TYPEOFARG; } @@ -88542,7 +87748,7 @@ SQLITE_PRIVATE void sqlite3VdbeMakeReady( int nVar; /* Number of parameters */ int nMem; /* Number of VM memory registers */ int nCursor; /* Number of cursors required */ - int nArg; /* Max number args to xFilter or xUpdate */ + int nArg; /* Number of arguments in subprograms */ int n; /* Loop counter */ struct ReusableSpace x; /* Reusable bulk memory */ @@ -88614,9 +87820,6 @@ SQLITE_PRIVATE void sqlite3VdbeMakeReady( p->apCsr = allocSpace(&x, p->apCsr, nCursor*sizeof(VdbeCursor*)); } } -#ifdef SQLITE_DEBUG - p->napArg = nArg; -#endif if( db->mallocFailed ){ p->nVar = 0; @@ -90114,7 +89317,6 @@ SQLITE_PRIVATE UnpackedRecord *sqlite3VdbeAllocUnpackedRecord( ){ UnpackedRecord *p; /* Unpacked record to return */ int nByte; /* Number of bytes required for *p */ - assert( sizeof(UnpackedRecord) + sizeof(Mem)*65536 < 0x7fffffff ); nByte = ROUND8P(sizeof(UnpackedRecord)) + sizeof(Mem)*(pKeyInfo->nKeyField+1); p = (UnpackedRecord *)sqlite3DbMallocRaw(pKeyInfo->db, nByte); if( !p ) return 0; @@ -91421,11 +90623,10 @@ SQLITE_PRIVATE void sqlite3VdbePreUpdateHook( preupdate.pCsr = pCsr; preupdate.op = op; preupdate.iNewReg = iReg; - preupdate.pKeyinfo = (KeyInfo*)&preupdate.keyinfoSpace; - preupdate.pKeyinfo->db = db; - preupdate.pKeyinfo->enc = ENC(db); - preupdate.pKeyinfo->nKeyField = pTab->nCol; - preupdate.pKeyinfo->aSortFlags = (u8*)&fakeSortOrder; + preupdate.keyinfo.db = db; + preupdate.keyinfo.enc = ENC(db); + preupdate.keyinfo.nKeyField = pTab->nCol; + preupdate.keyinfo.aSortFlags = (u8*)&fakeSortOrder; preupdate.iKey1 = iKey1; preupdate.iKey2 = iKey2; preupdate.pTab = pTab; @@ -91435,8 
+90636,8 @@ SQLITE_PRIVATE void sqlite3VdbePreUpdateHook( db->xPreUpdateCallback(db->pPreUpdateArg, db, op, zDb, zTbl, iKey1, iKey2); db->pPreUpdate = 0; sqlite3DbFree(db, preupdate.aRecord); - vdbeFreeUnpacked(db, preupdate.pKeyinfo->nKeyField+1,preupdate.pUnpacked); - vdbeFreeUnpacked(db, preupdate.pKeyinfo->nKeyField+1,preupdate.pNewUnpacked); + vdbeFreeUnpacked(db, preupdate.keyinfo.nKeyField+1, preupdate.pUnpacked); + vdbeFreeUnpacked(db, preupdate.keyinfo.nKeyField+1, preupdate.pNewUnpacked); sqlite3VdbeMemRelease(&preupdate.oldipk); if( preupdate.aNew ){ int i; @@ -93267,7 +92468,7 @@ SQLITE_API int sqlite3_bind_text64( assert( xDel!=SQLITE_DYNAMIC ); if( enc!=SQLITE_UTF8 ){ if( enc==SQLITE_UTF16 ) enc = SQLITE_UTF16NATIVE; - nData &= ~(u64)1; + nData &= ~(u16)1; } return bindText(pStmt, i, zData, nData, xDel, enc); } @@ -93675,7 +92876,7 @@ SQLITE_API int sqlite3_preupdate_old(sqlite3 *db, int iIdx, sqlite3_value **ppVa if( !aRec ) goto preupdate_old_out; rc = sqlite3BtreePayload(p->pCsr->uc.pCursor, 0, nRec, aRec); if( rc==SQLITE_OK ){ - p->pUnpacked = vdbeUnpackRecord(p->pKeyinfo, nRec, aRec); + p->pUnpacked = vdbeUnpackRecord(&p->keyinfo, nRec, aRec); if( !p->pUnpacked ) rc = SQLITE_NOMEM; } if( rc!=SQLITE_OK ){ @@ -93692,9 +92893,7 @@ SQLITE_API int sqlite3_preupdate_old(sqlite3 *db, int iIdx, sqlite3_value **ppVa Column *pCol = &p->pTab->aCol[iIdx]; if( pCol->iDflt>0 ){ if( p->apDflt==0 ){ - int nByte; - assert( sizeof(sqlite3_value*)*UMXV(p->pTab->nCol) < 0x7fffffff ); - nByte = sizeof(sqlite3_value*)*p->pTab->nCol; + int nByte = sizeof(sqlite3_value*)*p->pTab->nCol; p->apDflt = (sqlite3_value**)sqlite3DbMallocZero(db, nByte); if( p->apDflt==0 ) goto preupdate_old_out; } @@ -93740,7 +92939,7 @@ SQLITE_API int sqlite3_preupdate_count(sqlite3 *db){ #else p = db->pPreUpdate; #endif - return (p ? p->pKeyinfo->nKeyField : 0); + return (p ? p->keyinfo.nKeyField : 0); } #endif /* SQLITE_ENABLE_PREUPDATE_HOOK */ @@ -93823,7 +93022,7 @@ SQLITE_API int sqlite3_preupdate_new(sqlite3 *db, int iIdx, sqlite3_value **ppVa Mem *pData = &p->v->aMem[p->iNewReg]; rc = ExpandBlob(pData); if( rc!=SQLITE_OK ) goto preupdate_new_out; - pUnpack = vdbeUnpackRecord(p->pKeyinfo, pData->n, pData->z); + pUnpack = vdbeUnpackRecord(&p->keyinfo, pData->n, pData->z); if( !pUnpack ){ rc = SQLITE_NOMEM; goto preupdate_new_out; @@ -93844,8 +93043,7 @@ SQLITE_API int sqlite3_preupdate_new(sqlite3 *db, int iIdx, sqlite3_value **ppVa */ assert( p->op==SQLITE_UPDATE ); if( !p->aNew ){ - assert( sizeof(Mem)*UMXV(p->pCsr->nField) < 0x7fffffff ); - p->aNew = (Mem *)sqlite3DbMallocZero(db, sizeof(Mem)*p->pCsr->nField); + p->aNew = (Mem *)sqlite3DbMallocZero(db, sizeof(Mem) * p->pCsr->nField); if( !p->aNew ){ rc = SQLITE_NOMEM; goto preupdate_new_out; @@ -94615,11 +93813,11 @@ static VdbeCursor *allocateCursor( */ Mem *pMem = iCur>0 ? 
&p->aMem[p->nMem-iCur] : p->aMem; - i64 nByte; + int nByte; VdbeCursor *pCx = 0; - nByte = SZ_VDBECURSOR(nField); - assert( ROUND8(nByte)==nByte ); - if( eCurType==CURTYPE_BTREE ) nByte += sqlite3BtreeCursorSize(); + nByte = + ROUND8P(sizeof(VdbeCursor)) + 2*sizeof(u32)*nField + + (eCurType==CURTYPE_BTREE?sqlite3BtreeCursorSize():0); assert( iCur>=0 && iCur<p->nCursor ); if( p->apCsr[iCur] ){ /*OPTIMIZATION-IF-FALSE*/ @@ -94643,7 +93841,7 @@ static VdbeCursor *allocateCursor( pMem->szMalloc = 0; return 0; } - pMem->szMalloc = (int)nByte; + pMem->szMalloc = nByte; } p->apCsr[iCur] = pCx = (VdbeCursor*)pMem->zMalloc; @@ -94652,8 +93850,8 @@ static VdbeCursor *allocateCursor( pCx->nField = nField; pCx->aOffset = &pCx->aType[nField]; if( eCurType==CURTYPE_BTREE ){ - assert( ROUND8(SZ_VDBECURSOR(nField))==SZ_VDBECURSOR(nField) ); - pCx->uc.pCursor = (BtCursor*)&pMem->z[SZ_VDBECURSOR(nField)]; + pCx->uc.pCursor = (BtCursor*) + &pMem->z[ROUND8P(sizeof(VdbeCursor))+2*sizeof(u32)*nField]; sqlite3BtreeCursorZero(pCx->uc.pCursor); } return pCx; } @@ -95657,7 +94855,7 @@ case OP_Halt: { sqlite3VdbeError(p, "%s", pOp->p4.z); } pcx = (int)(pOp - aOp); - sqlite3_log(pOp->p1, "abort at %d: %s; [%s]", pcx, p->zErrMsg, p->zSql); + sqlite3_log(pOp->p1, "abort at %d in [%s]: %s", pcx, p->zSql, p->zErrMsg); } rc = sqlite3VdbeHalt(p); assert( rc==SQLITE_BUSY || rc==SQLITE_OK || rc==SQLITE_ERROR ); @@ -96983,7 +96181,7 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */ break; } -/* Opcode: Once P1 P2 P3 * * +/* Opcode: Once P1 P2 * * * ** ** Fall through to the next instruction the first time this opcode is ** encountered on each invocation of the byte-code program. Jump to P2 @@ -96999,12 +96197,6 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */ ** whether or not the jump should be taken. The bitmask is necessary ** because the self-altering code trick does not work for recursive ** triggers. -** -** The P3 operand is not used directly by this opcode. However P3 is -** used by the code generator as follows: If this opcode is the start -** of a subroutine and that subroutine uses a Bloom filter, then P3 will -** be the register that holds that Bloom filter. See tag-202407032019 -** in the source code for implementation details. */ case OP_Once: { /* jump */ u32 iAddr; /* Address of this instruction */ @@ -98050,7 +97242,6 @@ case OP_MakeRecord: { zHdr += sqlite3PutVarint(zHdr, serial_type); if( pRec->n ){ assert( pRec->z!=0 ); - assert( pRec->z!=(const char*)sqlite3CtypeMap ); memcpy(zPayload, pRec->z, pRec->n); zPayload += pRec->n; } @@ -100402,7 +99593,7 @@ case OP_RowData: { /* The OP_RowData opcodes always follow OP_NotExists or ** OP_SeekRowid or OP_Rewind/Op_Next with no intervening instructions ** that might invalidate the cursor. ** If this were not the case, one of the following assert()s ** would fail. Should this ever change (because of changes in the code ** generator) then the fix would be to insert a call to ** sqlite3VdbeCursorMoveto(). 
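An aside on the allocateCursor() arithmetic restored above: it packs the VdbeCursor header, the 2*nField u32 type/offset slots, and (for CURTYPE_BTREE) the BtCursor into a single allocation, rounding the header size up so the trailing arrays stay aligned. A minimal standalone sketch of the same carving pattern, with illustrative names rather than SQLite's:

#include <stdint.h>
#include <stdlib.h>

#define ROUND8(x) (((x)+7)&~7)     /* round up to a multiple of 8 */

typedef struct Cursor {
  int nField;
  uint32_t *aType;                 /* nField type codes              */
  uint32_t *aOffset;               /* nField offsets, follow aType   */
  void *pTrailer;                  /* optional trailing object       */
} Cursor;

/* One malloc serves the header, both arrays, and nTrailer extra bytes. */
static Cursor *cursorAlloc(int nField, size_t nTrailer){
  size_t nByte = ROUND8(sizeof(Cursor)) + 2*sizeof(uint32_t)*nField + nTrailer;
  Cursor *p = (Cursor*)malloc(nByte);
  if( p ){
    p->nField = nField;
    p->aType = (uint32_t*)((char*)p + ROUND8(sizeof(Cursor)));
    p->aOffset = &p->aType[nField];
    p->pTrailer = nTrailer ? (void*)&p->aOffset[nField] : 0;
  }
  return p;
}

A single allocation keeps the cursor and its arrays adjacent in memory and lets the whole object be released in one free().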
@@ -101671,7 +100862,7 @@ case OP_RowSetTest: { /* jump, in1, in3 */ */ case OP_Program: { /* jump0 */ int nMem; /* Number of memory registers for sub-program */ - i64 nByte; /* Bytes of runtime space required for sub-program */ + int nByte; /* Bytes of runtime space required for sub-program */ Mem *pRt; /* Register to allocate runtime space */ Mem *pMem; /* Used to iterate through memory cells */ Mem *pEnd; /* Last memory cell in new array */ @@ -101722,7 +100913,7 @@ case OP_Program: { /* jump0 */ nByte = ROUND8(sizeof(VdbeFrame)) + nMem * sizeof(Mem) + pProgram->nCsr * sizeof(VdbeCursor*) - + (7 + (i64)pProgram->nOp)/8; + + (pProgram->nOp + 7)/8; pFrame = sqlite3DbMallocZero(db, nByte); if( !pFrame ){ goto no_mem; @@ -101730,7 +100921,7 @@ case OP_Program: { /* jump0 */ sqlite3VdbeMemRelease(pRt); pRt->flags = MEM_Blob|MEM_Dyn; pRt->z = (char*)pFrame; - pRt->n = (int)nByte; + pRt->n = nByte; pRt->xDel = sqlite3VdbeFrameMemDel; pFrame->v = p; @@ -101829,14 +101020,12 @@ case OP_Param: { /* out2 */ ** statement counter is incremented (immediate foreign key constraints). */ case OP_FkCounter: { - if( pOp->p1 ){ + if( db->flags & SQLITE_DeferFKs ){ + db->nDeferredImmCons += pOp->p2; + }else if( pOp->p1 ){ db->nDeferredCons += pOp->p2; }else{ - if( db->flags & SQLITE_DeferFKs ){ - db->nDeferredImmCons += pOp->p2; - }else{ - p->nFkConstraint += pOp->p2; - } + p->nFkConstraint += pOp->p2; } break; } @@ -102051,7 +101240,7 @@ case OP_AggStep: { ** ** Note: We could avoid this by using a regular memory cell from aMem[] for ** the accumulator, instead of allocating one here. */ - nAlloc = ROUND8P( SZ_CONTEXT(n) ); + nAlloc = ROUND8P( sizeof(pCtx[0]) + (n-1)*sizeof(sqlite3_value*) ); pCtx = sqlite3DbMallocRawNN(db, nAlloc + sizeof(Mem)); if( pCtx==0 ) goto no_mem; pCtx->pOut = (Mem*)((u8*)pCtx + nAlloc); @@ -102711,7 +101900,6 @@ case OP_VFilter: { /* jump, ncycle */ /* Invoke the xFilter method */ apArg = p->apArg; - assert( nArg<=p->napArg ); for(i = 0; ivtabOnConflict; apArg = p->apArg; pX = &aMem[pOp->p3]; - assert( nArg<=p->napArg ); for(i=0; irc = rc; sqlite3SystemError(db, rc); testcase( sqlite3GlobalConfig.xLog!=0 ); - sqlite3_log(rc, "statement aborts at %d: %s; [%s]", - (int)(pOp - aOp), p->zErrMsg, p->zSql); + sqlite3_log(rc, "statement aborts at %d: [%s] %s", + (int)(pOp - aOp), p->zSql, p->zErrMsg); if( p->eVdbeState==VDBE_RUN_STATE ) sqlite3VdbeHalt(p); if( rc==SQLITE_IOERR_NOMEM ) sqlite3OomFault(db); if( rc==SQLITE_CORRUPT && db->autoCommit==0 ){ @@ -103709,7 +102896,6 @@ SQLITE_API int sqlite3_blob_open( char *zErr = 0; Table *pTab; Incrblob *pBlob = 0; - int iDb; Parse sParse; #ifdef SQLITE_ENABLE_API_ARMOR @@ -103755,10 +102941,7 @@ SQLITE_API int sqlite3_blob_open( sqlite3ErrorMsg(&sParse, "cannot open view: %s", zTable); } #endif - if( pTab==0 - || ((iDb = sqlite3SchemaToIndex(db, pTab->pSchema))==1 && - sqlite3OpenTempDatabase(&sParse)) - ){ + if( !pTab ){ if( sParse.zErrMsg ){ sqlite3DbFree(db, zErr); zErr = sParse.zErrMsg; @@ -103769,11 +102952,15 @@ SQLITE_API int sqlite3_blob_open( goto blob_open_out; } pBlob->pTab = pTab; - pBlob->zDb = db->aDb[iDb].zDbSName; + pBlob->zDb = db->aDb[sqlite3SchemaToIndex(db, pTab->pSchema)].zDbSName; /* Now search pTab for the exact column. 
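The OP_Program hunk above sizes the VdbeFrame with a trailing (pProgram->nOp + 7)/8 bytes, one bit per opcode of the sub-program; the VM uses these bits to remember which OP_Once instructions have already run in that frame. The ceiling-division bitmap idiom, sketched on its own:

#include <stdint.h>
#include <stddef.h>

/* Bytes needed for a bitmap holding one bit per item. */
static size_t bitmapBytes(int nItem){
  return (size_t)(nItem + 7)/8;        /* ceiling of nItem/8 */
}

static void bitmapSet(uint8_t *aMap, int i){
  aMap[i/8] |= (uint8_t)(1 << (i & 7));
}

static int bitmapGet(const uint8_t *aMap, int i){
  return (aMap[i/8] >> (i & 7)) & 1;
}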
*/ - iCol = sqlite3ColumnIndex(pTab, zColumn); - if( iCol<0 ){ + for(iCol=0; iColnCol; iCol++) { + if( sqlite3StrICmp(pTab->aCol[iCol].zCnName, zColumn)==0 ){ + break; + } + } + if( iCol==pTab->nCol ){ sqlite3DbFree(db, zErr); zErr = sqlite3MPrintf(db, "no such column: \"%s\"", zColumn); rc = SQLITE_ERROR; @@ -103853,6 +103040,7 @@ SQLITE_API int sqlite3_blob_open( {OP_Halt, 0, 0, 0}, /* 5 */ }; Vdbe *v = (Vdbe *)pBlob->pStmt; + int iDb = sqlite3SchemaToIndex(db, pTab->pSchema); VdbeOp *aOp; sqlite3VdbeAddOp4Int(v, OP_Transaction, iDb, wrFlag, @@ -104430,12 +103618,9 @@ struct VdbeSorter { u8 iPrev; /* Previous thread used to flush PMA */ u8 nTask; /* Size of aTask[] array */ u8 typeMask; - SortSubtask aTask[FLEXARRAY]; /* One or more subtasks */ + SortSubtask aTask[1]; /* One or more subtasks */ }; -/* Size (in bytes) of a VdbeSorter object that works with N or fewer subtasks */ -#define SZ_VDBESORTER(N) (offsetof(VdbeSorter,aTask)+(N)*sizeof(SortSubtask)) - #define SORTER_TYPE_INTEGER 0x01 #define SORTER_TYPE_TEXT 0x02 @@ -105037,7 +104222,7 @@ SQLITE_PRIVATE int sqlite3VdbeSorterInit( VdbeSorter *pSorter; /* The new sorter */ KeyInfo *pKeyInfo; /* Copy of pCsr->pKeyInfo with db==0 */ int szKeyInfo; /* Size of pCsr->pKeyInfo in bytes */ - i64 sz; /* Size of pSorter in bytes */ + int sz; /* Size of pSorter in bytes */ int rc = SQLITE_OK; #if SQLITE_MAX_WORKER_THREADS==0 # define nWorker 0 @@ -105065,10 +104250,8 @@ SQLITE_PRIVATE int sqlite3VdbeSorterInit( assert( pCsr->pKeyInfo ); assert( !pCsr->isEphemeral ); assert( pCsr->eCurType==CURTYPE_SORTER ); - assert( sizeof(KeyInfo) + UMXV(pCsr->pKeyInfo->nKeyField)*sizeof(CollSeq*) - < 0x7fffffff ); - szKeyInfo = SZ_KEYINFO(pCsr->pKeyInfo->nKeyField); - sz = SZ_VDBESORTER(nWorker+1); + szKeyInfo = sizeof(KeyInfo) + (pCsr->pKeyInfo->nKeyField-1)*sizeof(CollSeq*); + sz = sizeof(VdbeSorter) + nWorker * sizeof(SortSubtask); pSorter = (VdbeSorter*)sqlite3DbMallocZero(db, sz + szKeyInfo); pCsr->uc.pSorter = pSorter; @@ -105280,7 +104463,7 @@ static int vdbeSorterJoinAll(VdbeSorter *pSorter, int rcin){ */ static MergeEngine *vdbeMergeEngineNew(int nReader){ int N = 2; /* Smallest power of two >= nReader */ - i64 nByte; /* Total bytes of space to allocate */ + int nByte; /* Total bytes of space to allocate */ MergeEngine *pNew; /* Pointer to allocated object to return */ assert( nReader<=SORTER_MAX_MERGE_COUNT ); @@ -105532,10 +104715,6 @@ static int vdbeSorterSort(SortSubtask *pTask, SorterList *pList){ p->u.pNext = 0; for(i=0; aSlot[i]; i++){ p = vdbeSorterMerge(pTask, p, aSlot[i]); - /* ,--Each aSlot[] holds twice as much as the previous. So we cannot use - ** | up all 64 aSlots[] with only a 64-bit address space. - ** v */ - assert( iop on success */ Table *pTab = 0; /* Table holding the row */ + Column *pCol; /* A column of pTab */ ExprList *pFJMatch = 0; /* Matches for FULL JOIN .. USING */ const char *zCol = pRight->u.zToken; @@ -108377,6 +107557,7 @@ static int lookupName( if( pSrcList ){ for(i=0, pItem=pSrcList->a; inSrc; i++, pItem++){ + u8 hCol; pTab = pItem->pSTab; assert( pTab!=0 && pTab->zName!=0 ); assert( pTab->nCol>0 || pParse->nErr ); @@ -108464,38 +107645,43 @@ static int lookupName( sqlite3RenameTokenRemap(pParse, 0, (void*)&pExpr->y.pTab); } } - j = sqlite3ColumnIndex(pTab, zCol); - if( j>=0 ){ - if( cnt>0 ){ - if( pItem->fg.isUsing==0 - || sqlite3IdListIndex(pItem->u3.pUsing, zCol)<0 - ){ - /* Two or more tables have the same column name which is - ** not joined by USING. This is an error. 
Signal as much - ** by clearing pFJMatch and letting cnt go above 1. */ - sqlite3ExprListDelete(db, pFJMatch); - pFJMatch = 0; - }else - if( (pItem->fg.jointype & JT_RIGHT)==0 ){ - /* An INNER or LEFT JOIN. Use the left-most table */ - continue; - }else - if( (pItem->fg.jointype & JT_LEFT)==0 ){ - /* A RIGHT JOIN. Use the right-most table */ - cnt = 0; - sqlite3ExprListDelete(db, pFJMatch); - pFJMatch = 0; - }else{ - /* For a FULL JOIN, we must construct a coalesce() func */ - extendFJMatch(pParse, &pFJMatch, pMatch, pExpr->iColumn); + hCol = sqlite3StrIHash(zCol); + for(j=0, pCol=pTab->aCol; jnCol; j++, pCol++){ + if( pCol->hName==hCol + && sqlite3StrICmp(pCol->zCnName, zCol)==0 + ){ + if( cnt>0 ){ + if( pItem->fg.isUsing==0 + || sqlite3IdListIndex(pItem->u3.pUsing, zCol)<0 + ){ + /* Two or more tables have the same column name which is + ** not joined by USING. This is an error. Signal as much + ** by clearing pFJMatch and letting cnt go above 1. */ + sqlite3ExprListDelete(db, pFJMatch); + pFJMatch = 0; + }else + if( (pItem->fg.jointype & JT_RIGHT)==0 ){ + /* An INNER or LEFT JOIN. Use the left-most table */ + continue; + }else + if( (pItem->fg.jointype & JT_LEFT)==0 ){ + /* A RIGHT JOIN. Use the right-most table */ + cnt = 0; + sqlite3ExprListDelete(db, pFJMatch); + pFJMatch = 0; + }else{ + /* For a FULL JOIN, we must construct a coalesce() func */ + extendFJMatch(pParse, &pFJMatch, pMatch, pExpr->iColumn); + } } - } - cnt++; - pMatch = pItem; - /* Substitute the rowid (column -1) for the INTEGER PRIMARY KEY */ - pExpr->iColumn = j==pTab->iPKey ? -1 : (i16)j; - if( pItem->fg.isNestedFrom ){ - sqlite3SrcItemColumnUsed(pItem, j); + cnt++; + pMatch = pItem; + /* Substitute the rowid (column -1) for the INTEGER PRIMARY KEY */ + pExpr->iColumn = j==pTab->iPKey ? -1 : (i16)j; + if( pItem->fg.isNestedFrom ){ + sqlite3SrcItemColumnUsed(pItem, j); + } + break; } } if( 0==cnt && VisibleRowid(pTab) ){ @@ -108585,18 +107771,23 @@ static int lookupName( if( pTab ){ int iCol; + u8 hCol = sqlite3StrIHash(zCol); pSchema = pTab->pSchema; cntTab++; - iCol = sqlite3ColumnIndex(pTab, zCol); - if( iCol>=0 ){ - if( pTab->iPKey==iCol ) iCol = -1; - }else{ - if( sqlite3IsRowid(zCol) && VisibleRowid(pTab) ){ - iCol = -1; - }else{ - iCol = pTab->nCol; + for(iCol=0, pCol=pTab->aCol; iColnCol; iCol++, pCol++){ + if( pCol->hName==hCol + && sqlite3StrICmp(pCol->zCnName, zCol)==0 + ){ + if( iCol==pTab->iPKey ){ + iCol = -1; + } + break; } } + if( iCol>=pTab->nCol && sqlite3IsRowid(zCol) && VisibleRowid(pTab) ){ + /* IMP: R-51414-32910 */ + iCol = -1; + } if( iColnCol ){ cnt++; pMatch = 0; @@ -109235,12 +108426,13 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ ** sqlite_version() that might change over time cannot be used ** in an index or generated column. Curiously, they can be used ** in a CHECK constraint. SQLServer, MySQL, and PostgreSQL all - ** allow this. */ + ** all this. 
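The column-matching loops restored in lookupName() above share one trick: a precomputed one-byte hash (pCol->hName) is compared before the full case-insensitive string compare is paid for. The same pattern in isolation, with hypothetical helpers standing in for sqlite3StrIHash() and sqlite3StrICmp():

#include <ctype.h>

typedef struct Col { const char *zName; unsigned char hName; } Col;

/* Tiny case-insensitive hash; collisions only cost an extra compare. */
static unsigned char nameHash(const char *z){
  unsigned char h = 0;
  while( *z ){ h += (unsigned char)tolower((unsigned char)*z++); }
  return h;
}

static int nameICmp(const char *a, const char *b){
  while( *a && tolower((unsigned char)*a)==tolower((unsigned char)*b) ){
    a++; b++;
  }
  return tolower((unsigned char)*a) - tolower((unsigned char)*b);
}

/* Index of zName in aCol[0..nCol-1], or -1 if not present. */
static int findColumn(const Col *aCol, int nCol, const char *zName){
  unsigned char h = nameHash(zName);   /* hash once, outside the loop */
  int i;
  for(i=0; i<nCol; i++){
    if( aCol[i].hName==h && nameICmp(aCol[i].zName, zName)==0 ) return i;
  }
  return -1;
}

Because the hash rejects almost every non-matching column in one byte compare, the expensive character-by-character comparison runs only a handful of times per lookup.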
*/ sqlite3ResolveNotValid(pParse, pNC, "non-deterministic functions", NC_IdxExpr|NC_PartIdx|NC_GenCol, 0, pExpr); }else{ assert( (NC_SelfRef & 0xff)==NC_SelfRef ); /* Must fit in 8 bits */ pExpr->op2 = pNC->ncFlags & NC_SelfRef; + if( pNC->ncFlags & NC_FromDDL ) ExprSetProperty(pExpr, EP_FromDDL); } if( (pDef->funcFlags & SQLITE_FUNC_INTERNAL)!=0 && pParse->nested==0 @@ -109256,7 +108448,6 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ if( (pDef->funcFlags & (SQLITE_FUNC_DIRECT|SQLITE_FUNC_UNSAFE))!=0 && !IN_RENAME_OBJECT ){ - if( pNC->ncFlags & NC_FromDDL ) ExprSetProperty(pExpr, EP_FromDDL); sqlite3ExprFunctionUsable(pParse, pExpr, pDef); } } @@ -110310,22 +109501,20 @@ SQLITE_PRIVATE int sqlite3ResolveSelfReference( Expr *pExpr, /* Expression to resolve. May be NULL. */ ExprList *pList /* Expression list to resolve. May be NULL. */ ){ - SrcList *pSrc; /* Fake SrcList for pParse->pNewTable */ + SrcList sSrc; /* Fake SrcList for pParse->pNewTable */ NameContext sNC; /* Name context for pParse->pNewTable */ int rc; - u8 srcSpace[SZ_SRCLIST_1]; /* Memory space for the fake SrcList */ assert( type==0 || pTab!=0 ); assert( type==NC_IsCheck || type==NC_PartIdx || type==NC_IdxExpr || type==NC_GenCol || pTab==0 ); memset(&sNC, 0, sizeof(sNC)); - pSrc = (SrcList*)srcSpace; - memset(pSrc, 0, SZ_SRCLIST_1); + memset(&sSrc, 0, sizeof(sSrc)); if( pTab ){ - pSrc->nSrc = 1; - pSrc->a[0].zName = pTab->zName; - pSrc->a[0].pSTab = pTab; - pSrc->a[0].iCursor = -1; + sSrc.nSrc = 1; + sSrc.a[0].zName = pTab->zName; + sSrc.a[0].pSTab = pTab; + sSrc.a[0].iCursor = -1; if( pTab->pSchema!=pParse->db->aDb[1].pSchema ){ /* Cause EP_FromDDL to be set on TK_FUNCTION nodes of non-TEMP ** schema elements */ @@ -110333,7 +109522,7 @@ SQLITE_PRIVATE int sqlite3ResolveSelfReference( } } sNC.pParse = pParse; - sNC.pSrcList = pSrc; + sNC.pSrcList = &sSrc; sNC.ncFlags = type | NC_IsDDL; if( (rc = sqlite3ResolveExprNames(&sNC, pExpr))!=SQLITE_OK ) return rc; if( pList ) rc = sqlite3ResolveExprListNames(&sNC, pList); @@ -110417,9 +109606,7 @@ SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr){ pExpr->pLeft->x.pSelect->pEList->a[pExpr->iColumn].pExpr ); } - if( op==TK_VECTOR - || (op==TK_FUNCTION && pExpr->affExpr==SQLITE_AFF_DEFER) - ){ + if( op==TK_VECTOR ){ assert( ExprUseXList(pExpr) ); return sqlite3ExprAffinity(pExpr->x.pList->a[0].pExpr); } @@ -110612,9 +109799,7 @@ SQLITE_PRIVATE CollSeq *sqlite3ExprCollSeq(Parse *pParse, const Expr *pExpr){ p = p->pLeft; continue; } - if( op==TK_VECTOR - || (op==TK_FUNCTION && p->affExpr==SQLITE_AFF_DEFER) - ){ + if( op==TK_VECTOR ){ assert( ExprUseXList(p) ); p = p->x.pList->a[0].pExpr; continue; @@ -111488,7 +110673,7 @@ SQLITE_PRIVATE Expr *sqlite3ExprAnd(Parse *pParse, Expr *pLeft, Expr *pRight){ return pLeft; }else{ u32 f = pLeft->flags | pRight->flags; - if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse|EP_HasFunc))==EP_IsFalse + if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse))==EP_IsFalse && !IN_RENAME_OBJECT ){ sqlite3ExprDeferredDelete(pParse, pLeft); @@ -112086,7 +111271,7 @@ static Expr *exprDup( SQLITE_PRIVATE With *sqlite3WithDup(sqlite3 *db, With *p){ With *pRet = 0; if( p ){ - sqlite3_int64 nByte = SZ_WITH(p->nCte); + sqlite3_int64 nByte = sizeof(*p) + sizeof(p->a[0]) * (p->nCte-1); pRet = sqlite3DbMallocZero(db, nByte); if( pRet ){ int i; @@ -112197,6 +111382,7 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, const ExprList *p, int } pItem->zEName = sqlite3DbStrDup(db, pOldItem->zEName); pItem->fg = pOldItem->fg; + pItem->fg.done = 0; pItem->u = 
pOldItem->u; } return pNew; @@ -112213,9 +111399,11 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, const ExprList *p, int SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, const SrcList *p, int flags){ SrcList *pNew; int i; + int nByte; assert( db!=0 ); if( p==0 ) return 0; - pNew = sqlite3DbMallocRawNN(db, SZ_SRCLIST(p->nSrc) ); + nByte = sizeof(*p) + (p->nSrc>0 ? sizeof(p->a[0]) * (p->nSrc-1) : 0); + pNew = sqlite3DbMallocRawNN(db, nByte ); if( pNew==0 ) return 0; pNew->nSrc = pNew->nAlloc = p->nSrc; for(i=0; inSrc; i++){ @@ -112277,7 +111465,7 @@ SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3 *db, const IdList *p){ int i; assert( db!=0 ); if( p==0 ) return 0; - pNew = sqlite3DbMallocRawNN(db, SZ_IDLIST(p->nId)); + pNew = sqlite3DbMallocRawNN(db, sizeof(*pNew)+(p->nId-1)*sizeof(p->a[0]) ); if( pNew==0 ) return 0; pNew->nId = p->nId; for(i=0; inId; i++){ @@ -112309,7 +111497,7 @@ SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, const Select *pDup, int fla pNew->pLimit = sqlite3ExprDup(db, p->pLimit, flags); pNew->iLimit = 0; pNew->iOffset = 0; - pNew->selFlags = p->selFlags & ~(u32)SF_UsesEphemeral; + pNew->selFlags = p->selFlags & ~SF_UsesEphemeral; pNew->addrOpenEphm[0] = -1; pNew->addrOpenEphm[1] = -1; pNew->nSelectRow = p->nSelectRow; @@ -112361,7 +111549,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE ExprList *sqlite3ExprListAppendNew( struct ExprList_item *pItem; ExprList *pList; - pList = sqlite3DbMallocRawNN(db, SZ_EXPRLIST(4)); + pList = sqlite3DbMallocRawNN(db, sizeof(ExprList)+sizeof(pList->a[0])*4 ); if( pList==0 ){ sqlite3ExprDelete(db, pExpr); return 0; @@ -112381,7 +111569,8 @@ SQLITE_PRIVATE SQLITE_NOINLINE ExprList *sqlite3ExprListAppendGrow( struct ExprList_item *pItem; ExprList *pNew; pList->nAlloc *= 2; - pNew = sqlite3DbRealloc(db, pList, SZ_EXPRLIST(pList->nAlloc)); + pNew = sqlite3DbRealloc(db, pList, + sizeof(*pList)+(pList->nAlloc-1)*sizeof(pList->a[0])); if( pNew==0 ){ sqlite3ExprListDelete(db, pList); sqlite3ExprDelete(db, pExpr); @@ -113310,7 +112499,13 @@ SQLITE_PRIVATE const char *sqlite3RowidAlias(Table *pTab){ int ii; assert( VisibleRowid(pTab) ); for(ii=0; iinCol; iCol++){ + if( sqlite3_stricmp(azOpt[ii], pTab->aCol[iCol].zCnName)==0 ) break; + } + if( iCol==pTab->nCol ){ + return azOpt[ii]; + } } return 0; } @@ -113714,7 +112909,7 @@ static char *exprINAffinity(Parse *pParse, const Expr *pExpr){ char *zRet; assert( pExpr->op==TK_IN ); - zRet = sqlite3DbMallocRaw(pParse->db, 1+(i64)nVal); + zRet = sqlite3DbMallocRaw(pParse->db, nVal+1); if( zRet ){ int i; for(i=0; idb, pCopy); sqlite3DbFree(pParse->db, dest.zAffSdst); if( addrBloom ){ - /* Remember that location of the Bloom filter in the P3 operand - ** of the OP_Once that began this subroutine. 
tag-202407032019 */ sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2; if( dest.iSDParm2==0 ){ - /* If the Bloom filter won't actually be used, keep it small */ - sqlite3VdbeGetOp(v, addrBloom)->p1 = 10; + sqlite3VdbeChangeToNoop(v, addrBloom); + }else{ + sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2; } } if( rc ){ @@ -114426,7 +113620,7 @@ static void sqlite3ExprCodeIN( if( ExprHasProperty(pExpr, EP_Subrtn) ){ const VdbeOp *pOp = sqlite3VdbeGetOp(v, pExpr->y.sub.iAddr); assert( pOp->opcode==OP_Once || pParse->nErr ); - if( pOp->opcode==OP_Once && pOp->p3>0 ){ /* tag-202407032019 */ + if( pOp->opcode==OP_Once && pOp->p3>0 ){ assert( OptimizationEnabled(pParse->db, SQLITE_BloomFilter) ); sqlite3VdbeAddOp4Int(v, OP_Filter, pOp->p3, destIfFalse, rLhs, nVector); VdbeCoverage(v); @@ -115018,7 +114212,7 @@ static SQLITE_NOINLINE int sqlite3IndexedExprLookup( /* ** Expression pExpr is guaranteed to be a TK_COLUMN or equivalent. This ** function checks the Parse.pIdxPartExpr list to see if this column ** can be replaced with a constant value. If so, it generates code to ** put the constant value in a register (ideally, but not necessarily, @@ -116275,11 +115469,11 @@ SQLITE_PRIVATE void sqlite3ExprIfTrue(Parse *pParse, Expr *pExpr, int dest, int assert( TK_ISNULL==OP_IsNull ); testcase( op==TK_ISNULL ); assert( TK_NOTNULL==OP_NotNull ); testcase( op==TK_NOTNULL ); r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, &regFree1); - assert( regFree1==0 || regFree1==r1 ); - if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1); + sqlite3VdbeTypeofColumn(v, r1); sqlite3VdbeAddOp2(v, op, r1, dest); VdbeCoverageIf(v, op==TK_ISNULL); VdbeCoverageIf(v, op==TK_NOTNULL); + testcase( regFree1==0 ); break; } case TK_BETWEEN: { @@ -116450,11 +115644,11 @@ SQLITE_PRIVATE void sqlite3ExprIfFalse(Parse *pParse, Expr *pExpr, int dest, int case TK_ISNULL: case TK_NOTNULL: { r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, &regFree1); - assert( regFree1==0 || regFree1==r1 ); - if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1); + sqlite3VdbeTypeofColumn(v, r1); sqlite3VdbeAddOp2(v, op, r1, dest); testcase( op==TK_ISNULL ); VdbeCoverageIf(v, op==TK_ISNULL); testcase( op==TK_NOTNULL ); VdbeCoverageIf(v, op==TK_NOTNULL); + testcase( regFree1==0 ); break; } case TK_BETWEEN: { @@ -117354,9 +116548,7 @@ static void findOrCreateAggInfoColumn( ){ struct AggInfo_col *pCol; int k; - int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN]; - assert( mxTerm <= SMXV(i16) ); assert( pAggInfo->iFirstReg==0 ); pCol = pAggInfo->aCol; for(k=0; k<pAggInfo->nColumn; k++, pCol++){ @@ -117374,10 +116566,6 @@ static void findOrCreateAggInfoColumn( assert( pParse->db->mallocFailed ); return; } - if( k>mxTerm ){ - sqlite3ErrorMsg(pParse, "more than %d aggregate terms", mxTerm); - k = mxTerm; - } pCol = &pAggInfo->aCol[k]; assert( ExprUseYTab(pExpr) ); pCol->pTab = pExpr->y.pTab; @@ -117411,7 +116599,6 @@ fix_up_expr: if( pExpr->op==TK_COLUMN ){ pExpr->op = TK_AGG_COLUMN; } - assert( k <= SMXV(pExpr->iAgg) ); pExpr->iAgg = (i16)k; } @@ -117496,19 +116683,13 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ ** function that is already in the pAggInfo structure */ struct AggInfo_func *pItem = pAggInfo->aFunc; - int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN]; - assert( mxTerm <= SMXV(i16) ); for(i=0; i<pAggInfo->nFunc; i++, pItem++){ if( NEVER(pItem->pFExpr==pExpr) ) break; if( sqlite3ExprCompare(0, pItem->pFExpr, pExpr, -1)==0 ){ break; } } - if( i>mxTerm ){ - sqlite3ErrorMsg(pParse, "more than %d 
aggregate terms", mxTerm); - i = mxTerm; - assert( inFunc ); - }else if( i>=pAggInfo->nFunc ){ + if( i>=pAggInfo->nFunc ){ /* pExpr is original. Make a new entry in pAggInfo->aFunc[] */ u8 enc = ENC(pParse->db); @@ -117562,7 +116743,6 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ */ assert( !ExprHasProperty(pExpr, EP_TokenOnly|EP_Reduced) ); ExprSetVVAProperty(pExpr, EP_NoReduce); - assert( i <= SMXV(pExpr->iAgg) ); pExpr->iAgg = (i16)i; pExpr->pAggInfo = pAggInfo; return WRC_Prune; @@ -118273,13 +117453,13 @@ SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *pParse, SrcList *pSrc){ assert( pNew->nCol>0 ); nAlloc = (((pNew->nCol-1)/8)*8)+8; assert( nAlloc>=pNew->nCol && nAlloc%8==0 && nAlloc-pNew->nCol<8 ); - pNew->aCol = (Column*)sqlite3DbMallocZero(db, sizeof(Column)*(u32)nAlloc); + pNew->aCol = (Column*)sqlite3DbMallocZero(db, sizeof(Column)*nAlloc); pNew->zName = sqlite3MPrintf(db, "sqlite_altertab_%s", pTab->zName); if( !pNew->aCol || !pNew->zName ){ assert( db->mallocFailed ); goto exit_begin_add_column; } - memcpy(pNew->aCol, pTab->aCol, sizeof(Column)*(size_t)pNew->nCol); + memcpy(pNew->aCol, pTab->aCol, sizeof(Column)*pNew->nCol); for(i=0; inCol; i++){ Column *pCol = &pNew->aCol[i]; pCol->zCnName = sqlite3DbStrDup(db, pCol->zCnName); @@ -118374,8 +117554,10 @@ SQLITE_PRIVATE void sqlite3AlterRenameColumn( ** altered. Set iCol to be the index of the column being renamed */ zOld = sqlite3NameFromToken(db, pOld); if( !zOld ) goto exit_rename_column; - iCol = sqlite3ColumnIndex(pTab, zOld); - if( iCol<0 ){ + for(iCol=0; iColnCol; iCol++){ + if( 0==sqlite3StrICmp(pTab->aCol[iCol].zCnName, zOld) ) break; + } + if( iCol==pTab->nCol ){ sqlite3ErrorMsg(pParse, "no such column: \"%T\"", pOld); goto exit_rename_column; } @@ -118878,7 +118060,6 @@ static int renameParseSql( int bTemp /* True if SQL is from temp schema */ ){ int rc; - u64 flags; sqlite3ParseObjectInit(p, db); if( zSql==0 ){ @@ -118887,21 +118068,11 @@ static int renameParseSql( if( sqlite3StrNICmp(zSql,"CREATE ",7)!=0 ){ return SQLITE_CORRUPT_BKPT; } - if( bTemp ){ - db->init.iDb = 1; - }else{ - int iDb = sqlite3FindDbName(db, zDb); - assert( iDb>=0 && iDb<=0xff ); - db->init.iDb = (u8)iDb; - } + db->init.iDb = bTemp ? 1 : sqlite3FindDbName(db, zDb); p->eParseMode = PARSE_MODE_RENAME; p->db = db; p->nQueryLoop = 1; - flags = db->flags; - testcase( (db->flags & SQLITE_Comments)==0 && strstr(zSql," /* ")!=0 ); - db->flags |= SQLITE_Comments; rc = sqlite3RunParser(p, zSql); - db->flags = flags; if( db->mallocFailed ) rc = SQLITE_NOMEM; if( rc==SQLITE_OK && NEVER(p->pNewTable==0 && p->pNewIndex==0 && p->pNewTrigger==0) @@ -118964,11 +118135,10 @@ static int renameEditSql( nQuot = sqlite3Strlen30(zQuot)-1; } - assert( nQuot>=nNew && nSql>=0 && nNew>=0 ); - zOut = sqlite3DbMallocZero(db, (u64)nSql + pRename->nList*(u64)nQuot + 1); + assert( nQuot>=nNew ); + zOut = sqlite3DbMallocZero(db, nSql + pRename->nList*nQuot + 1); }else{ - assert( nSql>0 ); - zOut = (char*)sqlite3DbMallocZero(db, (2*(u64)nSql + 1) * 3); + zOut = (char*)sqlite3DbMallocZero(db, (nSql*2+1) * 3); if( zOut ){ zBuf1 = &zOut[nSql*2+1]; zBuf2 = &zOut[nSql*4+2]; @@ -118980,17 +118150,16 @@ static int renameEditSql( ** with the new column name, or with single-quoted versions of themselves. ** All that remains is to construct and return the edited SQL string. 
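The renameEditSql() loop above splices each replacement into zOut in place: when the old and new token lengths differ, memmove() shifts the tail of the string (NUL included) before the new text is copied in. The splice step by itself, assuming the buffer was sized for the growth up front:

#include <string.h>

/* Replace nOld bytes at z[iOff] with the nNew bytes at zNew. z holds a
** NUL-terminated string of length nCur and must have room for
** nCur + (nNew - nOld) + 1 bytes. Returns the new length. */
static size_t spliceReplace(char *z, size_t nCur, size_t iOff,
                            size_t nOld, const char *zNew, size_t nNew){
  if( nOld!=nNew ){
    /* Shift the tail, NUL included; memmove tolerates the overlap. */
    memmove(&z[iOff+nNew], &z[iOff+nOld], nCur - (iOff+nOld) + 1);
  }
  memcpy(&z[iOff], zNew, nNew);
  return nCur + nNew - nOld;
}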
*/ if( zOut ){ - i64 nOut = nSql; - assert( nSql>0 ); - memcpy(zOut, zSql, (size_t)nSql); + int nOut = nSql; + memcpy(zOut, zSql, nSql); while( pRename->pList ){ int iOff; /* Offset of token to replace in zOut */ - i64 nReplace; + u32 nReplace; const char *zReplace; RenameToken *pBest = renameColumnTokenNext(pRename); if( zNew ){ - if( bQuote==0 && sqlite3IsIdChar(*(u8*)pBest->t.z) ){ + if( bQuote==0 && sqlite3IsIdChar(*pBest->t.z) ){ nReplace = nNew; zReplace = zNew; }else{ @@ -119008,15 +118177,14 @@ static int renameEditSql( memcpy(zBuf1, pBest->t.z, pBest->t.n); zBuf1[pBest->t.n] = 0; sqlite3Dequote(zBuf1); - assert( nSql < 0x15555554 /* otherwise malloc would have failed */ ); - sqlite3_snprintf((int)(nSql*2), zBuf2, "%Q%s", zBuf1, + sqlite3_snprintf(nSql*2, zBuf2, "%Q%s", zBuf1, pBest->t.z[pBest->t.n]=='\'' ? " " : "" ); zReplace = zBuf2; nReplace = sqlite3Strlen30(zReplace); } - iOff = (int)(pBest->t.z - zSql); + iOff = pBest->t.z - zSql; if( pBest->t.n!=nReplace ){ memmove(&zOut[iOff + nReplace], &zOut[iOff + pBest->t.n], nOut - (iOff + pBest->t.n) @@ -119042,12 +118210,11 @@ static int renameEditSql( ** Set all pEList->a[].fg.eEName fields in the expression-list to val. */ static void renameSetENames(ExprList *pEList, int val){ - assert( val==ENAME_NAME || val==ENAME_TAB || val==ENAME_SPAN ); if( pEList ){ int i; for(i=0; inExpr; i++){ assert( val==ENAME_NAME || pEList->a[i].fg.eEName==ENAME_NAME ); - pEList->a[i].fg.eEName = val&0x3; + pEList->a[i].fg.eEName = val; } } } @@ -119304,7 +118471,7 @@ static void renameColumnFunc( if( sParse.pNewTable ){ if( IsView(sParse.pNewTable) ){ Select *pSelect = sParse.pNewTable->u.view.pSelect; - pSelect->selFlags &= ~(u32)SF_View; + pSelect->selFlags &= ~SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); rc = (db->mallocFailed ? SQLITE_NOMEM : sParse.rc); @@ -119522,7 +118689,7 @@ static void renameTableFunc( sNC.pParse = &sParse; assert( pSelect->selFlags & SF_View ); - pSelect->selFlags &= ~(u32)SF_View; + pSelect->selFlags &= ~SF_View; sqlite3SelectPrep(&sParse, pTab->u.view.pSelect, &sNC); if( sParse.nErr ){ rc = sParse.rc; @@ -119695,7 +118862,7 @@ static void renameQuotefixFunc( if( sParse.pNewTable ){ if( IsView(sParse.pNewTable) ){ Select *pSelect = sParse.pNewTable->u.view.pSelect; - pSelect->selFlags &= ~(u32)SF_View; + pSelect->selFlags &= ~SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); rc = (db->mallocFailed ? 
SQLITE_NOMEM : sParse.rc); @@ -119794,10 +118961,10 @@ static void renameTableTest( if( zDb && zInput ){ int rc; Parse sParse; - u64 flags = db->flags; + int flags = db->flags; if( bNoDQS ) db->flags &= ~(SQLITE_DqsDML|SQLITE_DqsDDL); rc = renameParseSql(&sParse, zDb, db, zInput, bTemp); - db->flags = flags; + db->flags |= (flags & (SQLITE_DqsDML|SQLITE_DqsDDL)); if( rc==SQLITE_OK ){ if( isLegacy==0 && sParse.pNewTable && IsView(sParse.pNewTable) ){ NameContext sNC; @@ -120289,8 +119456,7 @@ static void openStatTable( sqlite3NestedParse(pParse, "CREATE TABLE %Q.%s(%s)", pDb->zDbSName, zTab, aTable[i].zCols ); - assert( pParse->isCreate || pParse->nErr ); - aRoot[i] = (u32)pParse->u1.cr.regRoot; + aRoot[i] = (u32)pParse->regRoot; aCreateTbl[i] = OPFLAG_P2ISREG; } }else{ @@ -120481,7 +119647,7 @@ static void statInit( int nCol; /* Number of columns in index being sampled */ int nKeyCol; /* Number of key columns */ int nColUp; /* nCol rounded up for alignment */ - i64 n; /* Bytes of space to allocate */ + int n; /* Bytes of space to allocate */ sqlite3 *db = sqlite3_context_db_handle(context); /* Database connection */ #ifdef SQLITE_ENABLE_STAT4 /* Maximum number of samples. 0 if STAT4 data is not collected */ @@ -120517,7 +119683,7 @@ static void statInit( p->db = db; p->nEst = sqlite3_value_int64(argv[2]); p->nRow = 0; - p->nLimit = sqlite3_value_int(argv[3]); + p->nLimit = sqlite3_value_int64(argv[3]); p->nCol = nCol; p->nKeyCol = nKeyCol; p->nSkipAhead = 0; @@ -121650,6 +120816,16 @@ static void decodeIntArray( while( z[0]!=0 && z[0]!=' ' ) z++; while( z[0]==' ' ) z++; } + + /* Set the bLowQual flag if the peak number of rows obtained + ** from a full equality match is so large that a full table scan + ** seems likely to be faster than using the index. 
+ */ + if( aLog[0] > 66 /* Index has more than 100 rows */ + && aLog[0] <= aLog[nOut-1] /* And only a single value seen */ + ){ + pIndex->bLowQual = 1; + } } } @@ -122245,7 +121421,7 @@ static void attachFunc( if( aNew==0 ) return; memcpy(aNew, db->aDb, sizeof(db->aDb[0])*2); }else{ - aNew = sqlite3DbRealloc(db, db->aDb, sizeof(db->aDb[0])*(1+(i64)db->nDb)); + aNew = sqlite3DbRealloc(db, db->aDb, sizeof(db->aDb[0])*(db->nDb+1) ); if( aNew==0 ) return; } db->aDb = aNew; @@ -122316,13 +121492,6 @@ static void attachFunc( sqlite3BtreeEnterAll(db); db->init.iDb = 0; db->mDbFlags &= ~(DBFLAG_SchemaKnownOk); -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - if( db->setlkFlags & SQLITE_SETLK_BLOCK_ON_CONNECT ){ - int val = 1; - sqlite3_file *fd = sqlite3PagerFile(sqlite3BtreePager(pNew->pBt)); - sqlite3OsFileControlHint(fd, SQLITE_FCNTL_BLOCK_ON_CONNECT, &val); - } -#endif if( !REOPEN_AS_MEMDB(db) ){ rc = sqlite3Init(db, &zErrDyn); } @@ -123045,7 +122214,6 @@ static SQLITE_NOINLINE void lockTable( } } - assert( pToplevel->nTableLock < 0x7fff0000 ); nBytes = sizeof(TableLock) * (pToplevel->nTableLock+1); pToplevel->aTableLock = sqlite3DbReallocOrFree(pToplevel->db, pToplevel->aTableLock, nBytes); @@ -123146,12 +122314,10 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ || sqlite3VdbeAssertMayAbort(v, pParse->mayAbort)); if( v ){ if( pParse->bReturning ){ - Returning *pReturning; + Returning *pReturning = pParse->u1.pReturning; int addrRewind; int reg; - assert( !pParse->isCreate ); - pReturning = pParse->u1.d.pReturning; if( pReturning->nRetCol ){ sqlite3VdbeAddOp0(v, OP_FkCheck); addrRewind = @@ -123227,9 +122393,7 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ } if( pParse->bReturning ){ - Returning *pRet; - assert( !pParse->isCreate ); - pRet = pParse->u1.d.pReturning; + Returning *pRet = pParse->u1.pReturning; if( pRet->nRetCol ){ sqlite3VdbeAddOp2(v, OP_OpenEphemeral, pRet->iRetCur, pRet->nRetCol); } @@ -124044,16 +123208,10 @@ SQLITE_PRIVATE Index *sqlite3PrimaryKeyIndex(Table *pTab){ ** find the (first) offset of that column in index pIdx. Or return -1 ** if column iCol is not used in index pIdx. */ -SQLITE_PRIVATE int sqlite3TableColumnToIndex(Index *pIdx, int iCol){ +SQLITE_PRIVATE i16 sqlite3TableColumnToIndex(Index *pIdx, i16 iCol){ int i; - i16 iCol16; - assert( iCol>=(-1) && iCol<=SQLITE_MAX_COLUMN ); - assert( pIdx->nColumn<=SQLITE_MAX_COLUMN+1 ); - iCol16 = iCol; for(i=0; inColumn; i++){ - if( iCol16==pIdx->aiColumn[i] ){ - return i; - } + if( iCol==pIdx->aiColumn[i] ) return i; } return -1; } @@ -124307,9 +123465,8 @@ SQLITE_PRIVATE void sqlite3StartTable( /* If the file format and encoding in the database have not been set, ** set them now. */ - assert( pParse->isCreate ); - reg1 = pParse->u1.cr.regRowid = ++pParse->nMem; - reg2 = pParse->u1.cr.regRoot = ++pParse->nMem; + reg1 = pParse->regRowid = ++pParse->nMem; + reg2 = pParse->regRoot = ++pParse->nMem; reg3 = ++pParse->nMem; sqlite3VdbeAddOp3(v, OP_ReadCookie, iDb, reg3, BTREE_FILE_FORMAT); sqlite3VdbeUsesBtree(v, iDb); @@ -124324,8 +123481,8 @@ SQLITE_PRIVATE void sqlite3StartTable( ** The record created does not contain anything yet. It will be replaced ** by the real entry in code generated at sqlite3EndTable(). ** - ** The rowid for the new entry is left in register pParse->u1.cr.regRowid. - ** The root page of the new table is left in reg pParse->u1.cr.regRoot. + ** The rowid for the new entry is left in register pParse->regRowid. + ** The root page number of the new table is left in reg pParse->regRoot. 
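The bLowQual test added to decodeIntArray() above reads naturally once the units are known: SQLite stores row estimates as LogEst values, 10 times the base-2 logarithm, so 66 corresponds to roughly 100 rows, and aLog[0] <= aLog[nOut-1] means a single key value matches about as many rows as the whole index holds. A sketch of the encoding, assuming the usual LogEst convention:

#include <math.h>

typedef short LogEst;              /* 10*log2(x), as in SQLite */

static LogEst logEstFromValue(double x){
  return (LogEst)(10.0*log2(x) + 0.5);
}

static double valueFromLogEst(LogEst e){
  return pow(2.0, (double)e/10.0);
}

/* logEstFromValue(100.0) == 66, so "aLog[0] > 66" reads as
** "the index holds more than roughly 100 rows". */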
** The rowid and root page number values are needed by the code that ** sqlite3EndTable will generate. */ @@ -124336,7 +123493,7 @@ SQLITE_PRIVATE void sqlite3StartTable( #endif { assert( !pParse->bReturning ); - pParse->u1.cr.addrCrTab = + pParse->u1.addrCrTab = sqlite3VdbeAddOp3(v, OP_CreateBtree, iDb, reg2, BTREE_INTKEY); } sqlite3OpenSchemaTable(pParse, iDb); @@ -124414,8 +123571,7 @@ SQLITE_PRIVATE void sqlite3AddReturning(Parse *pParse, ExprList *pList){ sqlite3ExprListDelete(db, pList); return; } - assert( !pParse->isCreate ); - pParse->u1.d.pReturning = pRet; + pParse->u1.pReturning = pRet; pRet->pParse = pParse; pRet->pReturnEL = pList; sqlite3ParserAddCleanup(pParse, sqlite3DeleteReturning, pRet); @@ -124457,6 +123613,7 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ char *zType; Column *pCol; sqlite3 *db = pParse->db; + u8 hName; Column *aNew; u8 eType = COLTYPE_CUSTOM; u8 szEst = 1; @@ -124510,10 +123667,13 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ memcpy(z, sName.z, sName.n); z[sName.n] = 0; sqlite3Dequote(z); - if( p->nCol && sqlite3ColumnIndex(p, z)>=0 ){ - sqlite3ErrorMsg(pParse, "duplicate column name: %s", z); - sqlite3DbFree(db, z); - return; + hName = sqlite3StrIHash(z); + for(i=0; inCol; i++){ + if( p->aCol[i].hName==hName && sqlite3StrICmp(z, p->aCol[i].zCnName)==0 ){ + sqlite3ErrorMsg(pParse, "duplicate column name: %s", z); + sqlite3DbFree(db, z); + return; + } } aNew = sqlite3DbRealloc(db,p->aCol,((i64)p->nCol+1)*sizeof(p->aCol[0])); if( aNew==0 ){ @@ -124524,7 +123684,7 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ pCol = &p->aCol[p->nCol]; memset(pCol, 0, sizeof(p->aCol[0])); pCol->zCnName = z; - pCol->hName = sqlite3StrIHash(z); + pCol->hName = hName; sqlite3ColumnPropertiesFromName(p, pCol); if( sType.n==0 ){ @@ -124548,14 +123708,9 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ pCol->affinity = sqlite3AffinityType(zType, pCol); pCol->colFlags |= COLFLAG_HASTYPE; } - if( p->nCol<=0xff ){ - u8 h = pCol->hName % sizeof(p->aHx); - p->aHx[h] = p->nCol; - } p->nCol++; p->nNVCol++; - assert( pParse->isCreate ); - pParse->u1.cr.constraintName.n = 0; + pParse->constraintName.n = 0; } /* @@ -124819,11 +123974,15 @@ SQLITE_PRIVATE void sqlite3AddPrimaryKey( assert( pCExpr!=0 ); sqlite3StringToId(pCExpr); if( pCExpr->op==TK_ID ){ + const char *zCName; assert( !ExprHasProperty(pCExpr, EP_IntValue) ); - iCol = sqlite3ColumnIndex(pTab, pCExpr->u.zToken); - if( iCol>=0 ){ - pCol = &pTab->aCol[iCol]; - makeColumnPartOfPrimaryKey(pParse, pCol); + zCName = pCExpr->u.zToken; + for(iCol=0; iColnCol; iCol++){ + if( sqlite3StrICmp(zCName, pTab->aCol[iCol].zCnName)==0 ){ + pCol = &pTab->aCol[iCol]; + makeColumnPartOfPrimaryKey(pParse, pCol); + break; + } } } } @@ -124875,10 +124034,8 @@ SQLITE_PRIVATE void sqlite3AddCheckConstraint( && !sqlite3BtreeIsReadonly(db->aDb[db->init.iDb].pBt) ){ pTab->pCheck = sqlite3ExprListAppend(pParse, pTab->pCheck, pCheckExpr); - assert( pParse->isCreate ); - if( pParse->u1.cr.constraintName.n ){ - sqlite3ExprListSetName(pParse, pTab->pCheck, - &pParse->u1.cr.constraintName, 1); + if( pParse->constraintName.n ){ + sqlite3ExprListSetName(pParse, pTab->pCheck, &pParse->constraintName, 1); }else{ Token t; for(zStart++; sqlite3Isspace(zStart[0]); zStart++){} @@ -125073,8 +124230,7 @@ static void identPut(char *z, int *pIdx, char *zSignedIdent){ ** from sqliteMalloc() and must be freed by the calling function. 
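createTableStmt() below uses a measure-then-fill pattern: a first pass totals the length of every fragment so a single allocation suffices, and a second pass copies the fragments while advancing the write offset k. Reduced to its essentials (error handling trimmed):

#include <stdlib.h>
#include <string.h>

/* Concatenate az[0..n-1] into one malloc'd, NUL-terminated string. */
static char *joinStrings(const char * const *az, int n){
  size_t nByte = 1;                  /* room for the terminating NUL */
  size_t k = 0;
  char *z;
  int i;
  for(i=0; i<n; i++) nByte += strlen(az[i]);   /* pass 1: measure */
  z = (char*)malloc(nByte);
  if( z==0 ) return 0;
  for(i=0; i<n; i++){                          /* pass 2: fill */
    size_t len = strlen(az[i]);
    memcpy(&z[k], az[i], len);
    k += len;
  }
  z[k] = 0;
  return z;
}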
*/ static char *createTableStmt(sqlite3 *db, Table *p){ - int i, k, len; - i64 n; + int i, k, n; char *zStmt; char *zSep, *zSep2, *zEnd; Column *pCol; @@ -125098,9 +124254,8 @@ static char *createTableStmt(sqlite3 *db, Table *p){ sqlite3OomFault(db); return 0; } - assert( n>14 && n<=0x7fffffff ); - memcpy(zStmt, "CREATE TABLE ", 13); - k = 13; + sqlite3_snprintf(n, zStmt, "CREATE TABLE "); + k = sqlite3Strlen30(zStmt); identPut(zStmt, &k, p->zName); zStmt[k++] = '('; for(pCol=p->aCol, i=0; inCol; i++, pCol++){ @@ -125112,15 +124267,13 @@ static char *createTableStmt(sqlite3 *db, Table *p){ /* SQLITE_AFF_REAL */ " REAL", /* SQLITE_AFF_FLEXNUM */ " NUM", }; + int len; const char *zType; - len = sqlite3Strlen30(zSep); - assert( k+lenzCnName); - assert( kaffinity-SQLITE_AFF_BLOB >= 0 ); assert( pCol->affinity-SQLITE_AFF_BLOB < ArraySize(azType) ); testcase( pCol->affinity==SQLITE_AFF_BLOB ); @@ -125135,14 +124288,11 @@ static char *createTableStmt(sqlite3 *db, Table *p){ assert( pCol->affinity==SQLITE_AFF_BLOB || pCol->affinity==SQLITE_AFF_FLEXNUM || pCol->affinity==sqlite3AffinityType(zType, 0) ); - assert( k+lennColumn>=N ) return SQLITE_OK; - db = pParse->db; - assert( N>0 ); - assert( N <= SQLITE_MAX_COLUMN*2 /* tag-20250221-1 */ ); - testcase( N==2*pParse->db->aLimit[SQLITE_LIMIT_COLUMN] ); assert( pIdx->isResized==0 ); - nByte = (sizeof(char*) + sizeof(LogEst) + sizeof(i16) + 1)*(u64)N; + nByte = (sizeof(char*) + sizeof(LogEst) + sizeof(i16) + 1)*N; zExtra = sqlite3DbMallocZero(db, nByte); if( zExtra==0 ) return SQLITE_NOMEM_BKPT; memcpy(zExtra, pIdx->azColl, sizeof(char*)*pIdx->nColumn); @@ -125174,7 +124319,7 @@ static int resizeIndexObject(Parse *pParse, Index *pIdx, int N){ zExtra += sizeof(i16)*N; memcpy(zExtra, pIdx->aSortOrder, pIdx->nColumn); pIdx->aSortOrder = (u8*)zExtra; - pIdx->nColumn = (u16)N; /* See tag-20250221-1 above for proof of safety */ + pIdx->nColumn = N; pIdx->isResized = 1; return SQLITE_OK; } @@ -125340,9 +124485,9 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ ** into BTREE_BLOBKEY. */ assert( !pParse->bReturning ); - if( pParse->u1.cr.addrCrTab ){ + if( pParse->u1.addrCrTab ){ assert( v ); - sqlite3VdbeChangeP3(v, pParse->u1.cr.addrCrTab, BTREE_BLOBKEY); + sqlite3VdbeChangeP3(v, pParse->u1.addrCrTab, BTREE_BLOBKEY); } /* Locate the PRIMARY KEY index. 
Or, if this table was originally @@ -125428,14 +124573,14 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ pIdx->nColumn = pIdx->nKeyCol; continue; } - if( resizeIndexObject(pParse, pIdx, pIdx->nKeyCol+n) ) return; + if( resizeIndexObject(db, pIdx, pIdx->nKeyCol+n) ) return; for(i=0, j=pIdx->nKeyCol; inKeyCol, pPk, i) ){ testcase( hasColumn(pIdx->aiColumn, pIdx->nKeyCol, pPk->aiColumn[i]) ); pIdx->aiColumn[j] = pPk->aiColumn[i]; pIdx->azColl[j] = pPk->azColl[i]; if( pPk->aSortOrder[i] ){ - /* See ticket https://sqlite.org/src/info/bba7b69f9849b5bf */ + /* See ticket https://www.sqlite.org/src/info/bba7b69f9849b5bf */ pIdx->bAscKeyBug = 1; } j++; @@ -125452,7 +124597,7 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ if( !hasColumn(pPk->aiColumn, nPk, i) && (pTab->aCol[i].colFlags & COLFLAG_VIRTUAL)==0 ) nExtra++; } - if( resizeIndexObject(pParse, pPk, nPk+nExtra) ) return; + if( resizeIndexObject(db, pPk, nPk+nExtra) ) return; for(i=0, j=nPk; inCol; i++){ if( !hasColumn(pPk->aiColumn, j, i) && (pTab->aCol[i].colFlags & COLFLAG_VIRTUAL)==0 @@ -125782,7 +124927,7 @@ SQLITE_PRIVATE void sqlite3EndTable( /* If this is a CREATE TABLE xx AS SELECT ..., execute the SELECT ** statement to populate the new table. The root-page number for the - ** new table is in register pParse->u1.cr.regRoot. + ** new table is in register pParse->regRoot. ** ** Once the SELECT has been coded by sqlite3Select(), it is in a ** suitable state to query for the column names and types to be used @@ -125813,8 +124958,7 @@ SQLITE_PRIVATE void sqlite3EndTable( regRec = ++pParse->nMem; regRowid = ++pParse->nMem; sqlite3MayAbort(pParse); - assert( pParse->isCreate ); - sqlite3VdbeAddOp3(v, OP_OpenWrite, iCsr, pParse->u1.cr.regRoot, iDb); + sqlite3VdbeAddOp3(v, OP_OpenWrite, iCsr, pParse->regRoot, iDb); sqlite3VdbeChangeP5(v, OPFLAG_P2ISREG); addrTop = sqlite3VdbeCurrentAddr(v) + 1; sqlite3VdbeAddOp3(v, OP_InitCoroutine, regYield, 0, addrTop); @@ -125859,7 +125003,6 @@ SQLITE_PRIVATE void sqlite3EndTable( ** schema table. We just need to update that slot with all ** the information we've collected. */ - assert( pParse->isCreate ); sqlite3NestedParse(pParse, "UPDATE %Q." LEGACY_SCHEMA_TABLE " SET type='%s', name=%Q, tbl_name=%Q, rootpage=#%d, sql=%Q" @@ -125868,9 +125011,9 @@ SQLITE_PRIVATE void sqlite3EndTable( zType, p->zName, p->zName, - pParse->u1.cr.regRoot, + pParse->regRoot, zStmt, - pParse->u1.cr.regRowid + pParse->regRowid ); sqlite3DbFree(db, zStmt); sqlite3ChangeCookie(pParse, iDb); @@ -126610,7 +125753,7 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( }else{ nCol = pFromCol->nExpr; } - nByte = SZ_FKEY(nCol) + pTo->n + 1; + nByte = sizeof(*pFKey) + (nCol-1)*sizeof(pFKey->aCol[0]) + pTo->n + 1; if( pToCol ){ for(i=0; inExpr; i++){ nByte += sqlite3Strlen30(pToCol->a[i].zEName) + 1; @@ -126812,7 +125955,7 @@ static void sqlite3RefillIndex(Parse *pParse, Index *pIndex, int memRootPage){ ** not work for UNIQUE constraint indexes on WITHOUT ROWID tables ** with DESC primary keys, since those indexes have there keys in ** a different order from the main table. 
- ** See ticket: https://sqlite.org/src/info/bba7b69f9849b5bf + ** See ticket: https://www.sqlite.org/src/info/bba7b69f9849b5bf */ sqlite3VdbeAddOp1(v, OP_SeekEnd, iIdx); } @@ -126836,14 +125979,13 @@ static void sqlite3RefillIndex(Parse *pParse, Index *pIndex, int memRootPage){ */ SQLITE_PRIVATE Index *sqlite3AllocateIndexObject( sqlite3 *db, /* Database connection */ - int nCol, /* Total number of columns in the index */ + i16 nCol, /* Total number of columns in the index */ int nExtra, /* Number of bytes of extra space to alloc */ char **ppExtra /* Pointer to the "extra" space */ ){ Index *p; /* Allocated index object */ - i64 nByte; /* Bytes of space for Index object + arrays */ + int nByte; /* Bytes of space for Index object + arrays */ - assert( nCol <= 2*db->aLimit[SQLITE_LIMIT_COLUMN] ); nByte = ROUND8(sizeof(Index)) + /* Index structure */ ROUND8(sizeof(char*)*nCol) + /* Index.azColl */ ROUND8(sizeof(LogEst)*(nCol+1) + /* Index.aiRowLogEst */ @@ -126856,9 +125998,8 @@ SQLITE_PRIVATE Index *sqlite3AllocateIndexObject( p->aiRowLogEst = (LogEst*)pExtra; pExtra += sizeof(LogEst)*(nCol+1); p->aiColumn = (i16*)pExtra; pExtra += sizeof(i16)*nCol; p->aSortOrder = (u8*)pExtra; - assert( nCol>0 ); - p->nColumn = (u16)nCol; - p->nKeyCol = (u16)(nCol - 1); + p->nColumn = nCol; + p->nKeyCol = nCol - 1; *ppExtra = ((char*)p) + nByte; } return p; @@ -127669,11 +126810,12 @@ SQLITE_PRIVATE IdList *sqlite3IdListAppend(Parse *pParse, IdList *pList, Token * sqlite3 *db = pParse->db; int i; if( pList==0 ){ - pList = sqlite3DbMallocZero(db, SZ_IDLIST(1)); + pList = sqlite3DbMallocZero(db, sizeof(IdList) ); if( pList==0 ) return 0; }else{ IdList *pNew; - pNew = sqlite3DbRealloc(db, pList, SZ_IDLIST(pList->nId+1)); + pNew = sqlite3DbRealloc(db, pList, + sizeof(IdList) + pList->nId*sizeof(pList->a)); if( pNew==0 ){ sqlite3IdListDelete(db, pList); return 0; @@ -127772,7 +126914,8 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListEnlarge( return 0; } if( nAlloc>SQLITE_MAX_SRCLIST ) nAlloc = SQLITE_MAX_SRCLIST; - pNew = sqlite3DbRealloc(db, pSrc, SZ_SRCLIST(nAlloc)); + pNew = sqlite3DbRealloc(db, pSrc, + sizeof(*pSrc) + (nAlloc-1)*sizeof(pSrc->a[0]) ); if( pNew==0 ){ assert( db->mallocFailed ); return 0; @@ -127847,7 +126990,7 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListAppend( assert( pParse->db!=0 ); db = pParse->db; if( pList==0 ){ - pList = sqlite3DbMallocRawNN(pParse->db, SZ_SRCLIST(1)); + pList = sqlite3DbMallocRawNN(pParse->db, sizeof(SrcList) ); if( pList==0 ) return 0; pList->nAlloc = 1; pList->nSrc = 1; @@ -128733,9 +127876,10 @@ SQLITE_PRIVATE With *sqlite3WithAdd( } if( pWith ){ - pNew = sqlite3DbRealloc(db, pWith, SZ_WITH(pWith->nCte+1)); + sqlite3_int64 nByte = sizeof(*pWith) + (sizeof(pWith->a[1]) * pWith->nCte); + pNew = sqlite3DbRealloc(db, pWith, nByte); }else{ - pNew = sqlite3DbMallocZero(db, SZ_WITH(1)); + pNew = sqlite3DbMallocZero(db, sizeof(*pWith)); } assert( (pNew!=0 && zName!=0) || db->mallocFailed ); @@ -130709,6 +129853,11 @@ static void substrFunc( i64 p1, p2; assert( argc==3 || argc==2 ); + if( sqlite3_value_type(argv[1])==SQLITE_NULL + || (argc==3 && sqlite3_value_type(argv[2])==SQLITE_NULL) + ){ + return; + } p0type = sqlite3_value_type(argv[0]); p1 = sqlite3_value_int64(argv[1]); if( p0type==SQLITE_BLOB ){ @@ -130726,23 +129875,19 @@ static void substrFunc( } } } +#ifdef SQLITE_SUBSTR_COMPATIBILITY + /* If SUBSTR_COMPATIBILITY is defined then substr(X,0,N) work the same as + ** as substr(X,1,N) - it returns the first N characters of X. 
This + ** is essentially a back-out of the bug-fix in check-in [5fc125d362df4b8] + ** from 2009-02-02 for compatibility of applications that exploited the + ** old buggy behavior. */ + if( p1==0 ) p1 = 1; /* */ +#endif if( argc==3 ){ p2 = sqlite3_value_int64(argv[2]); - if( p2==0 && sqlite3_value_type(argv[2])==SQLITE_NULL ) return; }else{ p2 = sqlite3_context_db_handle(context)->aLimit[SQLITE_LIMIT_LENGTH]; } - if( p1==0 ){ -#ifdef SQLITE_SUBSTR_COMPATIBILITY - /* If SUBSTR_COMPATIBILITY is defined then substr(X,0,N) work the same as - ** as substr(X,1,N) - it returns the first N characters of X. This - ** is essentially a back-out of the bug-fix in check-in [5fc125d362df4b8] - ** from 2009-02-02 for compatibility of applications that exploited the - ** old buggy behavior. */ - p1 = 1; /* */ -#endif - if( sqlite3_value_type(argv[1])==SQLITE_NULL ) return; - } if( p1<0 ){ p1 += len; if( p1<0 ){ @@ -131443,7 +130588,7 @@ static const char hexdigits[] = { ** Append to pStr text that is the SQL literal representation of the ** value contained in pValue. */ -SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue, int bEscape){ +SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue){ /* As currently implemented, the string must be initially empty. ** we might relax this requirement in the future, but that will ** require enhancements to the implementation. */ @@ -131491,7 +130636,7 @@ SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue, int } case SQLITE_TEXT: { const unsigned char *zArg = sqlite3_value_text(pValue); - sqlite3_str_appendf(pStr, bEscape ? "%#Q" : "%Q", zArg); + sqlite3_str_appendf(pStr, "%Q", zArg); break; } default: { @@ -131502,105 +130647,6 @@ SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue, int } } -/* -** Return true if z[] begins with N hexadecimal digits, and write -** a decoding of those digits into *pVal. Or return false if any -** one of the first N characters in z[] is not a hexadecimal digit. -*/ -static int isNHex(const char *z, int N, u32 *pVal){ - int i; - int v = 0; - for(i=0; i0 ){ - memmove(&zOut[j], &zIn[i], n); - j += n; - i += n; - } - if( zIn[i+1]=='\\' ){ - i += 2; - zOut[j++] = '\\'; - }else if( sqlite3Isxdigit(zIn[i+1]) ){ - if( !isNHex(&zIn[i+1], 4, &v) ) goto unistr_error; - i += 5; - j += sqlite3AppendOneUtf8Character(&zOut[j], v); - }else if( zIn[i+1]=='+' ){ - if( !isNHex(&zIn[i+2], 6, &v) ) goto unistr_error; - i += 8; - j += sqlite3AppendOneUtf8Character(&zOut[j], v); - }else if( zIn[i+1]=='u' ){ - if( !isNHex(&zIn[i+2], 4, &v) ) goto unistr_error; - i += 6; - j += sqlite3AppendOneUtf8Character(&zOut[j], v); - }else if( zIn[i+1]=='U' ){ - if( !isNHex(&zIn[i+2], 8, &v) ) goto unistr_error; - i += 10; - j += sqlite3AppendOneUtf8Character(&zOut[j], v); - }else{ - goto unistr_error; - } - } - zOut[j] = 0; - sqlite3_result_text64(context, zOut, j, sqlite3_free, SQLITE_UTF8); - return; - -unistr_error: - sqlite3_free(zOut); - sqlite3_result_error(context, "invalid Unicode escape", -1); - return; -} - - /* ** Implementation of the QUOTE() function. ** @@ -131610,10 +130656,6 @@ unistr_error: ** as needed. BLOBs are encoded as hexadecimal literals. Strings with ** embedded NUL characters cannot be represented as string literals in SQL ** and hence the returned string literal is truncated prior to the first NUL. -** -** If sqlite3_user_data() is non-zero, then the UNISTR_QUOTE() function is -** implemented instead. 
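The substr() rework above funnels the start index through one normalization: with SQLITE_SUBSTR_COMPATIBILITY a start of 0 behaves like 1, and a negative start counts back from the end of the value, shrinking the requested count if it runs off the front. That normalization on its own, as a sketch (the real function also handles negative counts and UTF-8 character offsets):

#include <stdint.h>

/* Normalize substr(X, p1, p2): p1 is 1-based; negative p1 counts from
** the end. On return p1 is a 0-based offset and p2 a clamped count. */
static void substrNormalize(int64_t *pP1, int64_t *pP2, int64_t len){
  int64_t p1 = *pP1, p2 = *pP2;
  if( p1==0 ) p1 = 1;        /* SQLITE_SUBSTR_COMPATIBILITY behavior */
  if( p1<0 ){
    p1 += len;               /* substr('abcdef',-2,2) starts at 'e' */
    if( p1<0 ){
      p2 += p1;              /* began before the start: shrink count */
      if( p2<0 ) p2 = 0;
      p1 = 0;
    }
  }else{
    p1--;                    /* convert 1-based to 0-based */
  }
  *pP1 = p1;
  *pP2 = p2;
}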
The difference is that UNISTR_QUOTE() uses the -** UNISTR() function to escape control characters. */ static void quoteFunc(sqlite3_context *context, int argc, sqlite3_value **argv){ sqlite3_str str; sqlite3 *db = sqlite3_context_db_handle(context); assert( argc==1 ); UNUSED_PARAMETER(argc); sqlite3StrAccumInit(&str, db, 0, 0, db->aLimit[SQLITE_LIMIT_LENGTH]); - sqlite3QuoteValue(&str,argv[0],SQLITE_PTR_TO_INT(sqlite3_user_data(context))); + sqlite3QuoteValue(&str,argv[0]); sqlite3_result_text(context, sqlite3StrAccumFinish(&str), str.nChar, SQLITE_DYNAMIC); if( str.accError!=SQLITE_OK ){ @@ -131876,7 +130918,7 @@ static void replaceFunc( assert( zRep==sqlite3_value_text(argv[2]) ); nOut = nStr + 1; assert( nOut0 ){ const char *v = (const char*)sqlite3_value_text(argv[i]); if( v!=0 ){ if( j>0 && nSep>0 ){ @@ -132272,7 +131314,7 @@ static void kahanBabuskaNeumaierInit( ** that it returns NULL if it sums over no inputs. TOTAL returns ** 0.0 in that case. In addition, TOTAL always returns a float where ** SUM might return an integer if it never encounters a floating point -** value. TOTAL never fails, but SUM might throw an exception if +** value. TOTAL never fails, but SUM might through an exception if ** it overflows an integer. */ static void sumStep(sqlite3_context *context, int argc, sqlite3_value **argv){ @@ -133192,9 +132234,7 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ DFUNCTION(sqlite_version, 0, 0, 0, versionFunc ), DFUNCTION(sqlite_source_id, 0, 0, 0, sourceidFunc ), FUNCTION(sqlite_log, 2, 0, 0, errlogFunc ), - FUNCTION(unistr, 1, 0, 0, unistrFunc ), FUNCTION(quote, 1, 0, 0, quoteFunc ), - FUNCTION(unistr_quote, 1, 1, 0, quoteFunc ), VFUNCTION(last_insert_rowid, 0, 0, 0, last_insert_rowid), VFUNCTION(changes, 0, 0, 0, changes ), VFUNCTION(total_changes, 0, 0, 0, total_changes ), @@ -135481,7 +134521,7 @@ SQLITE_PRIVATE Select *sqlite3MultiValues(Parse *pParse, Select *pLeft, ExprList f = (f & pLeft->selFlags); } pSelect = sqlite3SelectNew(pParse, pRow, 0, 0, 0, 0, 0, f, 0); - pLeft->selFlags &= ~(u32)SF_MultiValue; + pLeft->selFlags &= ~SF_MultiValue; if( pSelect ){ pSelect->op = TK_ALL; pSelect->pPrior = pLeft; @@ -135863,22 +134903,28 @@ SQLITE_PRIVATE void sqlite3Insert( aTabColMap = sqlite3DbMallocZero(db, pTab->nCol*sizeof(int)); if( aTabColMap==0 ) goto insert_cleanup; for(i=0; i<pColumn->nId; i++){ - j = sqlite3ColumnIndex(pTab, pColumn->a[i].zName); - if( j>=0 ){ - if( aTabColMap[j]==0 ) aTabColMap[j] = i+1; - if( i!=j ) bIdListInOrder = 0; - if( j==pTab->iPKey ){ - ipkColumn = i; assert( !withoutRowid ); - } + const char *zCName = pColumn->a[i].zName; + u8 hName = sqlite3StrIHash(zCName); + for(j=0; j<pTab->nCol; j++){ + if( pTab->aCol[j].hName!=hName ) continue; + if( sqlite3StrICmp(zCName, pTab->aCol[j].zCnName)==0 ){ + if( aTabColMap[j]==0 ) aTabColMap[j] = i+1; + if( i!=j ) bIdListInOrder = 0; + if( j==pTab->iPKey ){ + ipkColumn = i; assert( !withoutRowid ); + } #ifndef SQLITE_OMIT_GENERATED_COLUMNS - if( pTab->aCol[j].colFlags & (COLFLAG_STORED|COLFLAG_VIRTUAL) ){ - sqlite3ErrorMsg(pParse, - "cannot INSERT into generated column \"%s\"", - pTab->aCol[j].zCnName); - goto insert_cleanup; - } + if( pTab->aCol[j].colFlags & (COLFLAG_STORED|COLFLAG_VIRTUAL) ){ + sqlite3ErrorMsg(pParse, + "cannot INSERT into generated column \"%s\"", + pTab->aCol[j].zCnName); + goto insert_cleanup; + } #endif - }else{ + break; + } + } + if( j>=pTab->nCol ){ if( sqlite3IsRowid(pColumn->a[i].zName) && !withoutRowid ){ ipkColumn = i;
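/* The name matched no declared column, but sqlite3IsRowid() accepts "rowid", "oid", and "_rowid_", so the value is destined for the implicit INTEGER PRIMARY KEY. */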
bIdListInOrder = 0; @@ -136176,7 +135222,7 @@ SQLITE_PRIVATE void sqlite3Insert( continue; }else if( pColumn==0 ){ /* Hidden columns that are not explicitly named in the INSERT - ** get their default value */ + ** get there default value */ sqlite3ExprCodeFactorable(pParse, sqlite3ColumnExpr(pTab, &pTab->aCol[i]), iRegStore); @@ -136901,7 +135947,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( ** could happen in any order, but they are grouped up front for ** convenience. ** - ** 2018-08-14: Ticket https://sqlite.org/src/info/908f001483982c43 + ** 2018-08-14: Ticket https://www.sqlite.org/src/info/908f001483982c43 ** The order of constraints used to have OE_Update as (2) and OE_Abort ** and so forth as (1). But apparently PostgreSQL checks the OE_Update ** constraint before any others, so it had to be moved. @@ -138711,8 +137757,6 @@ struct sqlite3_api_routines { /* Version 3.44.0 and later */ void *(*get_clientdata)(sqlite3*,const char*); int (*set_clientdata)(sqlite3*, const char*, void*, void(*)(void*)); - /* Version 3.50.0 and later */ - int (*setlk_timeout)(sqlite3*,int,int); }; /* @@ -139046,8 +138090,6 @@ typedef int (*sqlite3_loadext_entry)( /* Version 3.44.0 and later */ #define sqlite3_get_clientdata sqlite3_api->get_clientdata #define sqlite3_set_clientdata sqlite3_api->set_clientdata -/* Version 3.50.0 and later */ -#define sqlite3_setlk_timeout sqlite3_api->setlk_timeout #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) @@ -139569,9 +138611,7 @@ static const sqlite3_api_routines sqlite3Apis = { sqlite3_stmt_explain, /* Version 3.44.0 and later */ sqlite3_get_clientdata, - sqlite3_set_clientdata, - /* Version 3.50.0 and later */ - sqlite3_setlk_timeout + sqlite3_set_clientdata }; /* True if x is the directory separator character @@ -140093,48 +139133,48 @@ static const char *const pragCName[] = { /* 13 */ "pk", /* 14 */ "hidden", /* table_info reuses 8 */ - /* 15 */ "name", /* Used by: function_list */ - /* 16 */ "builtin", + /* 15 */ "schema", /* Used by: table_list */ + /* 16 */ "name", /* 17 */ "type", - /* 18 */ "enc", - /* 19 */ "narg", - /* 20 */ "flags", - /* 21 */ "schema", /* Used by: table_list */ - /* 22 */ "name", - /* 23 */ "type", - /* 24 */ "ncol", - /* 25 */ "wr", - /* 26 */ "strict", - /* 27 */ "seqno", /* Used by: index_xinfo */ - /* 28 */ "cid", - /* 29 */ "name", - /* 30 */ "desc", - /* 31 */ "coll", - /* 32 */ "key", - /* 33 */ "seq", /* Used by: index_list */ - /* 34 */ "name", - /* 35 */ "unique", - /* 36 */ "origin", - /* 37 */ "partial", - /* 38 */ "tbl", /* Used by: stats */ - /* 39 */ "idx", - /* 40 */ "wdth", - /* 41 */ "hght", - /* 42 */ "flgs", + /* 18 */ "ncol", + /* 19 */ "wr", + /* 20 */ "strict", + /* 21 */ "seqno", /* Used by: index_xinfo */ + /* 22 */ "cid", + /* 23 */ "name", + /* 24 */ "desc", + /* 25 */ "coll", + /* 26 */ "key", + /* 27 */ "name", /* Used by: function_list */ + /* 28 */ "builtin", + /* 29 */ "type", + /* 30 */ "enc", + /* 31 */ "narg", + /* 32 */ "flags", + /* 33 */ "tbl", /* Used by: stats */ + /* 34 */ "idx", + /* 35 */ "wdth", + /* 36 */ "hght", + /* 37 */ "flgs", + /* 38 */ "seq", /* Used by: index_list */ + /* 39 */ "name", + /* 40 */ "unique", + /* 41 */ "origin", + /* 42 */ "partial", /* 43 */ "table", /* Used by: foreign_key_check */ /* 44 */ "rowid", /* 45 */ "parent", /* 46 */ "fkid", - /* 47 */ "busy", /* Used by: wal_checkpoint */ - /* 48 */ "log", - /* 49 */ "checkpointed", - /* 50 */ "seq", /* Used by: 
database_list */ - /* 51 */ "name", - /* 52 */ "file", - /* index_info reuses 27 */ + /* index_info reuses 21 */ + /* 47 */ "seq", /* Used by: database_list */ + /* 48 */ "name", + /* 49 */ "file", + /* 50 */ "busy", /* Used by: wal_checkpoint */ + /* 51 */ "log", + /* 52 */ "checkpointed", + /* collation_list reuses 38 */ /* 53 */ "database", /* Used by: lock_status */ /* 54 */ "status", - /* collation_list reuses 33 */ /* 55 */ "cache_size", /* Used by: default_cache_size */ /* module_list pragma_list reuses 9 */ /* 56 */ "timeout", /* Used by: busy_timeout */ @@ -140227,7 +139267,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "collation_list", /* ePragTyp: */ PragTyp_COLLATION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 33, 2, + /* ColNames: */ 38, 2, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_COMPILEOPTION_DIAGS) @@ -140262,7 +139302,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "database_list", /* ePragTyp: */ PragTyp_DATABASE_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 50, 3, + /* ColNames: */ 47, 3, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) && !defined(SQLITE_OMIT_DEPRECATED) @@ -140342,7 +139382,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "function_list", /* ePragTyp: */ PragTyp_FUNCTION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 15, 6, + /* ColNames: */ 27, 6, /* iArg: */ 0 }, #endif #endif @@ -140371,17 +139411,17 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "index_info", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 27, 3, + /* ColNames: */ 21, 3, /* iArg: */ 0 }, {/* zName: */ "index_list", /* ePragTyp: */ PragTyp_INDEX_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 33, 5, + /* ColNames: */ 38, 5, /* iArg: */ 0 }, {/* zName: */ "index_xinfo", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 27, 6, + /* ColNames: */ 21, 6, /* iArg: */ 1 }, #endif #if !defined(SQLITE_OMIT_INTEGRITY_CHECK) @@ -140560,7 +139600,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "stats", /* ePragTyp: */ PragTyp_STATS, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_SchemaReq, - /* ColNames: */ 38, 5, + /* ColNames: */ 33, 5, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) @@ -140579,7 +139619,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "table_list", /* ePragTyp: */ PragTyp_TABLE_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1, - /* ColNames: */ 21, 6, + /* ColNames: */ 15, 6, /* iArg: */ 0 }, {/* zName: */ "table_xinfo", /* ePragTyp: */ PragTyp_TABLE_INFO, @@ -140656,7 +139696,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "wal_checkpoint", /* ePragTyp: */ PragTyp_WAL_CHECKPOINT, /* ePragFlg: */ PragFlg_NeedSchema, - /* ColNames: */ 47, 3, + /* ColNames: */ 50, 3, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_FLAG_PRAGMAS) @@ -140678,7 +139718,7 @@ static const PragmaName aPragmaName[] = { ** the following macro or to the actual analysis_limit if it is non-zero, ** in order to prevent PRAGMA optimize from running for too long. 
** -** The value of 2000 is chosen emperically so that the worst-case run-time ** for PRAGMA optimize does not exceed 100 milliseconds against a variety ** of test databases on a RaspberryPI-4 compiled using -Os and without ** -DSQLITE_DEBUG. Of course, your mileage may vary. For the purpose of @@ -141795,10 +140835,7 @@ SQLITE_PRIVATE void sqlite3Pragma( } }else{ db->flags &= ~mask; - if( mask==SQLITE_DeferFKs ){ - db->nDeferredImmCons = 0; - db->nDeferredCons = 0; - } + if( mask==SQLITE_DeferFKs ) db->nDeferredImmCons = 0; if( (mask & SQLITE_WriteSchema)!=0 && sqlite3_stricmp(zRight, "reset")==0 ){ @@ -144967,7 +144004,7 @@ SQLITE_PRIVATE Select *sqlite3SelectNew( pNew->addrOpenEphm[0] = -1; pNew->addrOpenEphm[1] = -1; pNew->nSelectRow = 0; - if( pSrc==0 ) pSrc = sqlite3DbMallocZero(pParse->db, SZ_SRCLIST_1); + if( pSrc==0 ) pSrc = sqlite3DbMallocZero(pParse->db, sizeof(*pSrc)); pNew->pSrc = pSrc; pNew->pWhere = pWhere; pNew->pGroupBy = pGroupBy; @@ -145132,33 +144169,10 @@ SQLITE_PRIVATE int sqlite3JoinType(Parse *pParse, Token *pA, Token *pB, Token *p */ SQLITE_PRIVATE int sqlite3ColumnIndex(Table *pTab, const char *zCol){ int i; - u8 h; - const Column *aCol; - int nCol; - - h = sqlite3StrIHash(zCol); - aCol = pTab->aCol; - nCol = pTab->nCol; - - /* See if the aHx gives us a lucky match */ - i = pTab->aHx[h % sizeof(pTab->aHx)]; - assert( i<nCol ); - if( aCol[i].hName==h - && sqlite3StrICmp(aCol[i].zCnName, zCol)==0 - ){ - return i; - } - - /* No lucky match from the aHx. Do a full scan. */ - i = 0; - while( 1 /*exit-by-break*/ ){ - if( aCol[i].hName==h - && sqlite3StrICmp(aCol[i].zCnName, zCol)==0 - ){ - return i; - } - i++; - if( i>=nCol ) break; + u8 h = sqlite3StrIHash(zCol); + Column *pCol; + for(pCol=pTab->aCol, i=0; i<pTab->nCol; pCol++, i++){ + if( pCol->hName==h && sqlite3StrICmp(pCol->zCnName, zCol)==0 ) return i; } return -1; } @@ -145409,7 +144423,7 @@ static int sqlite3ProcessJoin(Parse *pParse, Select *p){ } pE1 = sqlite3CreateColumnExpr(db, pSrc, iLeft, iLeftCol); sqlite3SrcItemColumnUsed(&pSrc->a[iLeft], iLeftCol); - if( (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 && pParse->nErr==0 ){ + if( (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 ){ /* This branch runs if the query contains one or more RIGHT or FULL ** JOINs. If only a single table on the left side of this join ** contains the zName column, then this branch is a no-op. @@ -145425,8 +144439,6 @@ */ ExprList *pFuncArgs = 0; /* Arguments to the coalesce() */ static const Token tkCoalesce = { "coalesce", 8 }; - assert( pE1!=0 ); - ExprSetProperty(pE1, EP_CanBeNull); while( tableAndColumnIndex(pSrc, iLeft+1, i, zName, &iLeft, &iLeftCol, pRight->fg.isSynthUsing)!=0 ){ if( pSrc->a[iLeft].fg.isUsing==0 @@ -145443,13 +144455,7 @@ if( pFuncArgs ){ pFuncArgs = sqlite3ExprListAppend(pParse, pFuncArgs, pE1); pE1 = sqlite3ExprFunction(pParse, pFuncArgs, &tkCoalesce, 0); - if( pE1 ){ - pE1->affExpr = SQLITE_AFF_DEFER; - } } - }else if( (pSrc->a[i+1].fg.jointype & JT_LEFT)!=0 && pParse->nErr==0 ){ - assert( pE1!=0 ); - ExprSetProperty(pE1, EP_CanBeNull); } pE2 = sqlite3CreateColumnExpr(db, pSrc, i+1, iRightCol); sqlite3SrcItemColumnUsed(pRight, iRightCol); @@ -146358,8 +145364,8 @@ static void selectInnerLoop( ** X extra columns.
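**
** (Layout note: the space following the KeyInfo header holds the aColl[]
** array of N+X collating-sequence pointers, immediately followed by the
** N+X aSortFlags[] bytes; that is why both variants below size the extra
** space as (N+X)*(sizeof(CollSeq*)+1).)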
*/ SQLITE_PRIVATE KeyInfo *sqlite3KeyInfoAlloc(sqlite3 *db, int N, int X){ - int nExtra = (N+X)*(sizeof(CollSeq*)+1); - KeyInfo *p = sqlite3DbMallocRawNN(db, SZ_KEYINFO(0) + nExtra); + int nExtra = (N+X)*(sizeof(CollSeq*)+1) - sizeof(CollSeq*); + KeyInfo *p = sqlite3DbMallocRawNN(db, sizeof(KeyInfo) + nExtra); if( p ){ p->aSortFlags = (u8*)&p->aColl[N+X]; p->nKeyField = (u16)N; @@ -146367,7 +145373,7 @@ SQLITE_PRIVATE KeyInfo *sqlite3KeyInfoAlloc(sqlite3 *db, int N, int X){ p->enc = ENC(db); p->db = db; p->nRef = 1; - memset(p->aColl, 0, nExtra); + memset(&p[1], 0, nExtra); }else{ return (KeyInfo*)sqlite3OomFault(db); } @@ -148068,7 +147074,6 @@ static int multiSelect( multi_select_end: pDest->iSdst = dest.iSdst; pDest->nSdst = dest.nSdst; - pDest->iSDParm2 = dest.iSDParm2; if( pDelete ){ sqlite3ParserAddCleanup(pParse, sqlite3SelectDeleteGeneric, pDelete); } @@ -149057,9 +148062,9 @@ static int compoundHasDifferentAffinities(Select *p){ ** from 2015-02-09.) ** ** (3) If the subquery is the right operand of a LEFT JOIN then -** (3a) the subquery may not be a join -** (**) Was (3b): "the FROM clause of the subquery may not contain -** a virtual table" +** (3a) the subquery may not be a join and +** (3b) the FROM clause of the subquery may not contain a virtual +** table and ** (**) Was: "The outer query may not have a GROUP BY." This case ** is now managed correctly ** (3d) the outer query may not be DISTINCT. @@ -149275,7 +148280,7 @@ static int flattenSubquery( */ if( (pSubitem->fg.jointype & (JT_OUTER|JT_LTORJ))!=0 ){ if( pSubSrc->nSrc>1 /* (3a) */ - /**** || IsVirtual(pSubSrc->a[0].pSTab) (3b)-omitted */ + || IsVirtual(pSubSrc->a[0].pSTab) /* (3b) */ || (p->selFlags & SF_Distinct)!=0 /* (3d) */ || (pSubitem->fg.jointype & JT_RIGHT)!=0 /* (26) */ ){ @@ -149679,8 +148684,7 @@ static void constInsert( return; /* Already present. Return without doing anything. */ } } - assert( SQLITE_AFF_NONEbHasAffBlob = 1; } @@ -149755,8 +148759,7 @@ static int propagateConstantExprRewriteOne( if( pColumn==pExpr ) continue; if( pColumn->iTable!=pExpr->iTable ) continue; if( pColumn->iColumn!=pExpr->iColumn ) continue; - assert( SQLITE_AFF_NONEpWinDefn = 0; #endif - p->selFlags &= ~(u32)SF_Compound; + p->selFlags &= ~SF_Compound; assert( (p->selFlags & SF_Converted)==0 ); p->selFlags |= SF_Converted; assert( pNew->pPrior!=0 ); @@ -150886,7 +149889,7 @@ static int selectExpander(Walker *pWalker, Select *p){ pEList = p->pEList; if( pParse->pWith && (p->selFlags & SF_View) ){ if( p->pWith==0 ){ - p->pWith = (With*)sqlite3DbMallocZero(db, SZ_WITH(1) ); + p->pWith = (With*)sqlite3DbMallocZero(db, sizeof(With)); if( p->pWith==0 ){ return WRC_Abort; } @@ -152025,7 +151028,6 @@ static void agginfoFree(sqlite3 *db, void *pArg){ ** * There is no WHERE or GROUP BY or HAVING clauses on the subqueries ** * The outer query is a simple count(*) with no WHERE clause or other ** extraneous syntax. -** * None of the subqueries are DISTINCT (forumpost/a860f5fb2e 2025-03-10) ** ** Return TRUE if the optimization is undertaken. 
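**
** As an illustrative sketch (t1 and t2 are hypothetical table names),
**
**    SELECT count(*) FROM (SELECT x FROM t1 UNION ALL SELECT y FROM t2)
**
** is rewritten as
**
**    SELECT (SELECT count(*) FROM t1) + (SELECT count(*) FROM t2)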
*/ @@ -152058,11 +151060,7 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ if( pSub->op!=TK_ALL && pSub->pPrior ) return 0; /* Must be UNION ALL */ if( pSub->pWhere ) return 0; /* No WHERE clause */ if( pSub->pLimit ) return 0; /* No LIMIT clause */ - if( pSub->selFlags & (SF_Aggregate|SF_Distinct) ){ - testcase( pSub->selFlags & SF_Aggregate ); - testcase( pSub->selFlags & SF_Distinct ); - return 0; /* Not an aggregate nor DISTINCT */ - } + if( pSub->selFlags & SF_Aggregate ) return 0; /* Not an aggregate */ assert( pSub->pHaving==0 ); /* Due to the previous */ pSub = pSub->pPrior; /* Repeat over compound */ }while( pSub ); @@ -152074,14 +151072,14 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ pExpr = 0; pSub = sqlite3SubqueryDetach(db, pFrom); sqlite3SrcListDelete(db, p->pSrc); - p->pSrc = sqlite3DbMallocZero(pParse->db, SZ_SRCLIST_1); + p->pSrc = sqlite3DbMallocZero(pParse->db, sizeof(*p->pSrc)); while( pSub ){ Expr *pTerm; pPrior = pSub->pPrior; pSub->pPrior = 0; pSub->pNext = 0; pSub->selFlags |= SF_Aggregate; - pSub->selFlags &= ~(u32)SF_Compound; + pSub->selFlags &= ~SF_Compound; pSub->nSelectRow = 0; sqlite3ParserAddCleanup(pParse, sqlite3ExprListDeleteGeneric, pSub->pEList); pTerm = pPrior ? sqlite3ExprDup(db, pCount, 0) : pCount; @@ -152096,7 +151094,7 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ pSub = pPrior; } p->pEList->a[0].pExpr = pExpr; - p->selFlags &= ~(u32)SF_Aggregate; + p->selFlags &= ~SF_Aggregate; #if TREETRACE_ENABLED if( sqlite3TreeTrace & 0x200 ){ @@ -152303,7 +151301,7 @@ SQLITE_PRIVATE int sqlite3Select( testcase( pParse->earlyCleanup ); p->pOrderBy = 0; } - p->selFlags &= ~(u32)SF_Distinct; + p->selFlags &= ~SF_Distinct; p->selFlags |= SF_NoopOrderBy; } sqlite3SelectPrep(pParse, p, 0); @@ -152342,7 +151340,7 @@ SQLITE_PRIVATE int sqlite3Select( ** and leaving this flag set can cause errors if a compound sub-query ** in p->pSrc is flattened into this query and this function called ** again as part of compound SELECT processing. */ - p->selFlags &= ~(u32)SF_UFSrcCheck; + p->selFlags &= ~SF_UFSrcCheck; } if( pDest->eDest==SRT_Output ){ @@ -152831,7 +151829,7 @@ SQLITE_PRIVATE int sqlite3Select( && p->pWin==0 #endif ){ - p->selFlags &= ~(u32)SF_Distinct; + p->selFlags &= ~SF_Distinct; pGroupBy = p->pGroupBy = sqlite3ExprListDup(db, pEList, 0); if( pGroupBy ){ for(i=0; inExpr; i++){ @@ -152940,12 +151938,6 @@ SQLITE_PRIVATE int sqlite3Select( if( pWInfo==0 ) goto select_end; if( sqlite3WhereOutputRowCount(pWInfo) < p->nSelectRow ){ p->nSelectRow = sqlite3WhereOutputRowCount(pWInfo); - if( pDest->eDest<=SRT_DistQueue && pDest->eDest>=SRT_DistFifo ){ - /* TUNING: For a UNION CTE, because UNION is implies DISTINCT, - ** reduce the estimated output row count by 8 (LogEst 30). 
- ** Search for tag-20250414a to see other cases */ - p->nSelectRow -= 30; - } } if( sDistinct.isTnct && sqlite3WhereIsDistinct(pWInfo) ){ sDistinct.eTnctType = sqlite3WhereIsDistinct(pWInfo); @@ -153319,10 +152311,6 @@ SQLITE_PRIVATE int sqlite3Select( if( iOrderByCol ){ Expr *pX = p->pEList->a[iOrderByCol-1].pExpr; Expr *pBase = sqlite3ExprSkipCollateAndLikely(pX); - while( ALWAYS(pBase!=0) && pBase->op==TK_IF_NULL_ROW ){ - pX = pBase->pLeft; - pBase = sqlite3ExprSkipCollateAndLikely(pX); - } if( ALWAYS(pBase!=0) && pBase->op!=TK_AGG_COLUMN && pBase->op!=TK_REGISTER @@ -153906,8 +152894,7 @@ SQLITE_PRIVATE Trigger *sqlite3TriggerList(Parse *pParse, Table *pTab){ assert( pParse->db->pVtabCtx==0 ); #endif assert( pParse->bReturning ); - assert( !pParse->isCreate ); - assert( &(pParse->u1.d.pReturning->retTrig) == pTrig ); + assert( &(pParse->u1.pReturning->retTrig) == pTrig ); pTrig->table = pTab->zName; pTrig->pTabSchema = pTab->pSchema; pTrig->pNext = pList; @@ -154875,8 +153862,7 @@ static void codeReturningTrigger( ExprList *pNew; Returning *pReturning; Select sSelect; - SrcList *pFrom; - u8 fromSpace[SZ_SRCLIST_1]; + SrcList sFrom; assert( v!=0 ); if( !pParse->bReturning ){ return; } assert( db->pParse==pParse ); - assert( !pParse->isCreate ); - pReturning = pParse->u1.d.pReturning; + pReturning = pParse->u1.pReturning; if( pTrigger != &(pReturning->retTrig) ){ /* This RETURNING trigger is for a different statement */ return; } memset(&sSelect, 0, sizeof(sSelect)); - pFrom = (SrcList*)fromSpace; - memset(pFrom, 0, SZ_SRCLIST_1); + memset(&sFrom, 0, sizeof(sFrom)); sSelect.pEList = sqlite3ExprListDup(db, pReturning->pReturnEL, 0); - sSelect.pSrc = pFrom; - pFrom->nSrc = 1; - pFrom->a[0].pSTab = pTab; - pFrom->a[0].zName = pTab->zName; /* tag-20240424-1 */ - pFrom->a[0].iCursor = -1; + sSelect.pSrc = &sFrom; + sFrom.nSrc = 1; + sFrom.a[0].pSTab = pTab; + sFrom.a[0].zName = pTab->zName; /* tag-20240424-1 */ + sFrom.a[0].iCursor = -1; sqlite3SelectPrep(pParse, &sSelect, 0); if( pParse->nErr==0 ){ assert( db->mallocFailed==0 ); @@ -155117,8 +154101,6 @@ static TriggerPrg *codeRowTrigger( sSubParse.eTriggerOp = pTrigger->op; sSubParse.nQueryLoop = pParse->nQueryLoop; sSubParse.prepFlags = pParse->prepFlags; - sSubParse.oldmask = 0; - sSubParse.newmask = 0; v = sqlite3GetVdbe(&sSubParse); if( v ){ @@ -155873,32 +154855,38 @@ SQLITE_PRIVATE void sqlite3Update( */ chngRowid = chngPk = 0; for(i=0; i<pChanges->nExpr; i++){ + u8 hCol = sqlite3StrIHash(pChanges->a[i].zEName); /* If this is an UPDATE with a FROM clause, do not resolve expressions ** here. The call to sqlite3Select() below will do that. */ if( nChangeFrom==0 && sqlite3ResolveExprNames(&sNC, pChanges->a[i].pExpr) ){ goto update_cleanup; } - j = sqlite3ColumnIndex(pTab, pChanges->a[i].zEName); - if( j>=0 ){ - if( j==pTab->iPKey ){ - chngRowid = 1; - pRowidExpr = pChanges->a[i].pExpr; - iRowidExpr = i; - }else if( pPk && (pTab->aCol[j].colFlags & COLFLAG_PRIMKEY)!=0 ){ - chngPk = 1; - } + for(j=0; j<pTab->nCol; j++){ + if( pTab->aCol[j].hName==hCol + && sqlite3StrICmp(pTab->aCol[j].zCnName, pChanges->a[i].zEName)==0 + ){ + if( j==pTab->iPKey ){ + chngRowid = 1; + pRowidExpr = pChanges->a[i].pExpr; + iRowidExpr = i; + }else if( pPk && (pTab->aCol[j].colFlags & COLFLAG_PRIMKEY)!=0 ){ + chngPk = 1; + } #ifndef SQLITE_OMIT_GENERATED_COLUMNS - else if( pTab->aCol[j].colFlags & COLFLAG_GENERATED ){ - testcase( pTab->aCol[j].colFlags & COLFLAG_VIRTUAL ); - testcase( pTab->aCol[j].colFlags & COLFLAG_STORED ); - sqlite3ErrorMsg(pParse, - "cannot UPDATE generated column \"%s\"", - pTab->aCol[j].zCnName); - goto update_cleanup; - } + else if( pTab->aCol[j].colFlags & COLFLAG_GENERATED ){ + testcase( pTab->aCol[j].colFlags & COLFLAG_VIRTUAL ); + testcase( pTab->aCol[j].colFlags & COLFLAG_STORED ); + sqlite3ErrorMsg(pParse, + "cannot UPDATE generated column \"%s\"", + pTab->aCol[j].zCnName); + goto update_cleanup; + } #endif - aXRef[j] = i; - }else{ + aXRef[j] = i; + break; + } + } + if( j>=pTab->nCol ){ if( pPk==0 && sqlite3IsRowid(pChanges->a[i].zEName) ){ j = -1; chngRowid = 1; @@ -157221,7 +156209,7 @@ SQLITE_PRIVATE void sqlite3Vacuum(Parse *pParse, Token *pNm, Expr *pInto){ #else /* When SQLITE_BUG_COMPATIBLE_20160819 is defined, unrecognized arguments ** to VACUUM are silently ignored. This is a back-out of a bug fix that - ** occurred on 2016-08-19 (https://sqlite.org/src/info/083f9e6270). + ** occurred on 2016-08-19 (https://www.sqlite.org/src/info/083f9e6270). ** The buggy behavior is required for binary compatibility with some ** legacy applications. */ iDb = sqlite3FindDb(pParse->db, pNm); @@ -157300,7 +156288,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE int sqlite3RunVacuum( saved_nChange = db->nChange; saved_nTotalChange = db->nTotalChange; saved_mTrace = db->mTrace; - db->flags |= SQLITE_WriteSchema | SQLITE_IgnoreChecks | SQLITE_Comments; + db->flags |= SQLITE_WriteSchema | SQLITE_IgnoreChecks; db->mDbFlags |= DBFLAG_PreferBuiltin | DBFLAG_Vacuum; db->flags &= ~(u64)(SQLITE_ForeignKeys | SQLITE_ReverseOrder | SQLITE_Defensive | SQLITE_CountRows); @@ -158005,12 +156993,11 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ ** schema table. We just need to update that slot with all ** the information we've collected. ** - ** The VM register number pParse->u1.cr.regRowid holds the rowid of an + ** The VM register number pParse->regRowid holds the rowid of an ** entry in the sqlite_schema table that was created for this vtab ** by sqlite3StartTable(). */ iDb = sqlite3SchemaToIndex(db, pTab->pSchema); - assert( pParse->isCreate ); sqlite3NestedParse(pParse, "UPDATE %Q." LEGACY_SCHEMA_TABLE " " "SET type='table', name=%Q, tbl_name=%Q, rootpage=0, sql=%Q " @@ -158019,7 +157006,7 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ pTab->zName, pTab->zName, zStmt, - pParse->u1.cr.regRowid + pParse->regRowid ); v = sqlite3GetVdbe(pParse); sqlite3ChangeCookie(pParse, iDb); @@ -159429,14 +158416,9 @@ struct WhereInfo { Bitmask revMask; /* Mask of ORDER BY terms that need reversing */ WhereClause sWC; /* Decomposition of the WHERE clause */ WhereMaskSet sMaskSet; /* Map cursor numbers to bitmasks */ - WhereLevel a[FLEXARRAY]; /* Information about each nest loop in WHERE */ + WhereLevel a[1]; /* Information about each nest loop in WHERE */ }; -/* -** The size (in bytes) of a WhereInfo object that holds N WhereLevels. -*/ -#define SZ_WHEREINFO(N) ROUND8(offsetof(WhereInfo,a)+(N)*sizeof(WhereLevel)) - /* ** Private interfaces - callable only by other where.c routines. ** @@ -160116,7 +159098,7 @@ static void adjustOrderByCol(ExprList *pOrderBy, ExprList *pEList){ /* ** pX is an expression of the form: (vector) IN (SELECT ...) ** In other words, it is a vector IN operator with a SELECT clause on the -** RHS. But not all terms in the vector are indexable and the terms might +** LHS. But not all terms in the vector are indexable and the terms might ** not be in the correct order for indexing. ** ** This routine makes a copy of the input pX expression and then adjusts @@ -160172,9 +159154,7 @@ static Expr *removeUnindexableInClauseTerms( int iField; assert( (pLoop->aLTerm[i]->eOperator & (WO_OR|WO_AND))==0 ); iField = pLoop->aLTerm[i]->u.x.iField - 1; - if( NEVER(pOrigRhs->a[iField].pExpr==0) ){ - continue; /* Duplicate PK column */ - } + if( pOrigRhs->a[iField].pExpr==0 ) continue; /* Duplicate PK column */ pRhs = sqlite3ExprListAppend(pParse, pRhs, pOrigRhs->a[iField].pExpr); pOrigRhs->a[iField].pExpr = 0; if( pRhs ) pRhs->a[pRhs->nExpr-1].u.x.iOrderByCol = iField+1; @@ -160271,7 +159251,7 @@ static SQLITE_NOINLINE void codeINTerm( return; } } - for(i=iEq; i<pLoop->nLTerm; i++){ + for(i=iEq;i<pLoop->nLTerm; i++){ assert( pLoop->aLTerm[i]!=0 ); if( pLoop->aLTerm[i]->pExpr==pX ) nEq++; } @@ -160280,13 +159260,22 @@ if( !ExprUseXSelect(pX) || pX->x.pSelect->pEList->nExpr==1 ){ eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, 0, &iTab); }else{ - sqlite3 *db = pParse->db; - Expr *pXMod = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); - if( !db->mallocFailed ){ - aiMap = (int*)sqlite3DbMallocZero(db, sizeof(int)*nEq); - eType = sqlite3FindInIndex(pParse, pXMod, IN_INDEX_LOOP, 0, aiMap, &iTab); + Expr *pExpr = pTerm->pExpr; + if( pExpr->iTable==0 || !ExprHasProperty(pExpr, EP_Subrtn) ){ + sqlite3 *db = pParse->db; + pX = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); + if( !db->mallocFailed ){ + aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*nEq); + eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap,&iTab); + pExpr->iTable = iTab; + } + sqlite3ExprDelete(db, pX); + }else{ + int n = sqlite3ExprVectorSize(pX->pLeft); + aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*MAX(nEq,n)); + eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap, &iTab); } - sqlite3ExprDelete(db, pXMod); + pX = pExpr; } if( eType==IN_INDEX_INDEX_DESC ){ @@ -160316,7 +159305,7 @@ if( pIn ){ int iMap = 0; /* Index in aiMap[] */ pIn += i; - for(i=iEq; i<pLoop->nLTerm; i++){ + for(i=iEq;i<pLoop->nLTerm; i++){ if( pLoop->aLTerm[i]->pExpr==pX ){ int iOut = iTarget + i - iEq;
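/* iOut is the register that receives this component of the IN vector: the rowid when the IN index is the table itself, otherwise the corresponding index column. */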
if( eType==IN_INDEX_ROWID ){ @@ -161175,9 +160164,6 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( } sqlite3VdbeAddOp2(v, OP_Integer, pLoop->u.vtab.idxNum, iReg); sqlite3VdbeAddOp2(v, OP_Integer, nConstraint, iReg+1); - /* The instruction immediately prior to OP_VFilter must be an OP_Integer - ** that sets the "argc" value for xVFilter. This is necessary for - ** resolveP2() to work correctly. See tag-20250207a. */ sqlite3VdbeAddOp4(v, OP_VFilter, iCur, addrNotFound, iReg, pLoop->u.vtab.idxStr, pLoop->u.vtab.needFree ? P4_DYNAMIC : P4_STATIC); @@ -161768,13 +160754,12 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( if( pLevel->iLeftJoin==0 ){ /* If a partial index is driving the loop, try to eliminate WHERE clause ** terms from the query that must be true due to the WHERE clause of - ** the partial index. This optimization does not work on an outer join, - ** as shown by: + ** the partial index. ** - ** 2019-11-02 ticket 623eff57e76d45f6 (LEFT JOIN) - ** 2025-05-29 forum post 7dee41d32506c4ae (RIGHT JOIN) + ** 2019-11-02 ticket 623eff57e76d45f6: This optimization does not work + ** for a LEFT JOIN. */ - if( pIdx->pPartIdxWhere && pLevel->pRJ==0 ){ + if( pIdx->pPartIdxWhere ){ whereApplyPartialIndexConstraints(pIdx->pPartIdxWhere, iCur, pWC); } }else{ @@ -161881,7 +160866,8 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( int nNotReady; /* The number of notReady tables */ SrcItem *origSrc; /* Original list of tables */ nNotReady = pWInfo->nLevel - iLevel - 1; - pOrTab = sqlite3DbMallocRawNN(db, SZ_SRCLIST(nNotReady+1)); + pOrTab = sqlite3DbMallocRawNN(db, + sizeof(*pOrTab)+ nNotReady*sizeof(pOrTab->a[0])); if( pOrTab==0 ) return notReady; pOrTab->nAlloc = (u8)(nNotReady + 1); pOrTab->nSrc = pOrTab->nAlloc; @@ -161932,7 +160918,7 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( ** ** This optimization also only applies if the (x1 OR x2 OR ...) term ** is not contained in the ON clause of a LEFT JOIN. - ** See ticket http://sqlite.org/src/info/f2369304e4 + ** See ticket http://www.sqlite.org/src/info/f2369304e4 ** ** 2022-02-04: Do not push down slices of a row-value comparison. ** In other words, "w" or "y" may not be a slice of a vector. Otherwise, @@ -162424,8 +161410,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE void sqlite3WhereRightJoinLoop( WhereInfo *pSubWInfo; WhereLoop *pLoop = pLevel->pWLoop; SrcItem *pTabItem = &pWInfo->pTabList->a[pLevel->iFrom]; - SrcList *pFrom; - u8 fromSpace[SZ_SRCLIST_1]; + SrcList sFrom; Bitmask mAll = 0; int k; @@ -162469,14 +161454,13 @@ SQLITE_PRIVATE SQLITE_NOINLINE void sqlite3WhereRightJoinLoop( sqlite3ExprDup(pParse->db, pTerm->pExpr, 0)); } } - pFrom = (SrcList*)fromSpace; - pFrom->nSrc = 1; - pFrom->nAlloc = 1; - memcpy(&pFrom->a[0], pTabItem, sizeof(SrcItem)); - pFrom->a[0].fg.jointype = 0; + sFrom.nSrc = 1; + sFrom.nAlloc = 1; + memcpy(&sFrom.a[0], pTabItem, sizeof(SrcItem)); + sFrom.a[0].fg.jointype = 0; assert( pParse->withinRJSubrtn < 100 ); pParse->withinRJSubrtn++; - pSubWInfo = sqlite3WhereBegin(pParse, pFrom, pSubWhere, 0, 0, 0, + pSubWInfo = sqlite3WhereBegin(pParse, &sFrom, pSubWhere, 0, 0, 0, WHERE_RIGHT_JOIN, 0); if( pSubWInfo ){ int iCur = pLevel->iTabCur; @@ -163447,42 +162431,30 @@ static void exprAnalyzeOrTerm( ** 1. The SQLITE_Transitive optimization must be enabled ** 2. Must be either an == or an IS operator ** 3. Not originating in the ON clause of an OUTER JOIN -** 4. The operator is not IS or else the query does not contain RIGHT JOIN -** 5. The affinities of A and B must be compatible -** 6a. 
Both operands use the same collating sequence OR -** 6b. The overall collating sequence is BINARY +** 4. The affinities of A and B must be compatible +** 5a. Both operands use the same collating sequence OR +** 5b. The overall collating sequence is BINARY ** If this routine returns TRUE, that means that the RHS can be substituted ** for the LHS anyplace else in the WHERE clause where the LHS column occurs. ** This is an optimization. No harm comes from returning 0. But if 1 is ** returned when it should not be, then incorrect answers might result. */ -static int termIsEquivalence(Parse *pParse, Expr *pExpr, SrcList *pSrc){ +static int termIsEquivalence(Parse *pParse, Expr *pExpr){ char aff1, aff2; CollSeq *pColl; - if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; /* (1) */ - if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; /* (2) */ - if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; /* (3) */ - assert( pSrc!=0 ); - if( pExpr->op==TK_IS - && pSrc->nSrc - && (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 - ){ - return 0; /* (4) */ - } + if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; + if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; + if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; aff1 = sqlite3ExprAffinity(pExpr->pLeft); aff2 = sqlite3ExprAffinity(pExpr->pRight); if( aff1!=aff2 && (!sqlite3IsNumericAffinity(aff1) || !sqlite3IsNumericAffinity(aff2)) ){ - return 0; /* (5) */ + return 0; } pColl = sqlite3ExprCompareCollSeq(pParse, pExpr); - if( !sqlite3IsBinary(pColl) - && !sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight) - ){ - return 0; /* (6) */ - } - return 1; + if( sqlite3IsBinary(pColl) ) return 1; + return sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight); } /* @@ -163747,8 +162719,8 @@ static void exprAnalyze( if( op==TK_IS ) pNew->wtFlags |= TERM_IS; pTerm = &pWC->a[idxTerm]; pTerm->wtFlags |= TERM_COPIED; - assert( pWInfo->pTabList!=0 ); - if( termIsEquivalence(pParse, pDup, pWInfo->pTabList) ){ + + if( termIsEquivalence(pParse, pDup) ){ pTerm->eOperator |= WO_EQUIV; eExtraOp = WO_EQUIV; } @@ -164476,16 +163448,11 @@ struct HiddenIndexInfo { int eDistinct; /* Value to return from sqlite3_vtab_distinct() */ u32 mIn; /* Mask of terms that are IN (...) */ u32 mHandleIn; /* Terms that vtab will handle as IN (...) */ - sqlite3_value *aRhs[FLEXARRAY]; /* RHS values for constraints. MUST BE LAST - ** Extra space is allocated to hold up - ** to nTerm such values */ + sqlite3_value *aRhs[1]; /* RHS values for constraints. 
MUST BE LAST + ** because extra space is allocated to hold up + ** to nTerm such values */ }; -/* Size (in bytes) of a HiddenIndeInfo object sufficient to hold as -** many as N constraints */ -#define SZ_HIDDENINDEXINFO(N) \ - (offsetof(HiddenIndexInfo,aRhs) + (N)*sizeof(sqlite3_value*)) - /* Forward declaration of methods */ static int whereLoopResize(sqlite3*, WhereLoop*, int); @@ -165550,8 +164517,6 @@ static SQLITE_NOINLINE void constructAutomaticIndex( } /* Construct the Index object to describe this index */ - assert( nKeyCol <= pTable->nCol + MAX(0, pTable->nCol - BMS + 1) ); - /* ^-- This guarantees that the number of index columns will fit in the u16 */ pIdx = sqlite3AllocateIndexObject(pParse->db, nKeyCol+HasRowid(pTable), 0, &zNotUsed); if( pIdx==0 ) goto end_auto_index_create; @@ -165963,8 +164928,8 @@ static sqlite3_index_info *allocateIndexInfo( */ pIdxInfo = sqlite3DbMallocZero(pParse->db, sizeof(*pIdxInfo) + (sizeof(*pIdxCons) + sizeof(*pUsage))*nTerm - + sizeof(*pIdxOrderBy)*nOrderBy - + SZ_HIDDENINDEXINFO(nTerm) ); + + sizeof(*pIdxOrderBy)*nOrderBy + sizeof(*pHidden) + + sizeof(sqlite3_value*)*nTerm ); if( pIdxInfo==0 ){ sqlite3ErrorMsg(pParse, "out of memory"); return 0; } @@ -167600,8 +166565,11 @@ static int whereLoopAddBtreeIndex( assert( pNew->u.btree.nBtm==0 ); opMask = WO_EQ|WO_IN|WO_GT|WO_GE|WO_LT|WO_LE|WO_ISNULL|WO_IS; } - if( pProbe->bUnordered ){ - opMask &= ~(WO_GT|WO_GE|WO_LT|WO_LE); + if( pProbe->bUnordered || pProbe->bLowQual ){ + if( pProbe->bUnordered ) opMask &= ~(WO_GT|WO_GE|WO_LT|WO_LE); + if( pProbe->bLowQual && pSrc->fg.isIndexedBy==0 ){ + opMask &= ~(WO_EQ|WO_IN|WO_IS); + } } assert( pNew->u.btree.nEq<pProbe->nColumn ); @@ -167674,7 +166642,6 @@ if( ExprUseXSelect(pExpr) ){ /* "x IN (SELECT ...)": TUNING: the SELECT returns 25 rows */ int i; - int bRedundant = 0; nIn = 46; assert( 46==sqlite3LogEst(25) ); /* The expression may actually be of the form (x, y) IN (SELECT...). ** for each such term. The following loop checks that pTerm is the ** first such term in use, and sets nIn back to 0 if it is not. */ for(i=0; i<pNew->nLTerm-1; i++){ - if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ){ - nIn = 0; - if( pNew->aLTerm[i]->u.x.iField == pTerm->u.x.iField ){ - /* Detect when two or more columns of an index match the same - ** column of a vector IN operater, and avoid adding the column - ** to the WhereLoop more than once. See tag-20250707-01 - ** in test/rowvalue.test */ - bRedundant = 1; - } - } - } - if( bRedundant ){ - pNew->nLTerm--; - continue; + if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ) nIn = 0; } }else if( ALWAYS(pExpr->x.pList && pExpr->x.pList->nExpr) ){ /* "x IN (value, value, ...)" */ @@ -167928,7 +166882,7 @@ if( (pNew->wsFlags & WHERE_TOP_LIMIT)==0 && pNew->u.btree.nEq<pProbe->nColumn && (pNew->u.btree.nEq<pProbe->nKeyCol || - pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY) + pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY) ){ if( pNew->u.btree.nEq>3 ){ sqlite3ProgressCheck(pParse); @@ -168057,7 +167011,6 @@ static int whereUsablePartialIndex( if( (!ExprHasProperty(pExpr, EP_OuterON) || pExpr->w.iJoin==iTab) && ((jointype & JT_OUTER)==0 || ExprHasProperty(pExpr, EP_OuterON)) && sqlite3ExprImpliesExpr(pParse, pExpr, pWhere, iTab) - && !sqlite3ExprImpliesExpr(pParse, pExpr, pWhere, -1) && (pTerm->wtFlags & TERM_VNULL)==0 ){ return 1; @@ -168553,7 +167506,7 @@ static int whereLoopAddBtree( && (HasRowid(pTab) || pWInfo->pSelect!=0 || sqlite3FaultSim(700)) ){ WHERETRACE(0x200, - ("-> %s is a covering index according to bitmasks\n", + ("-> %s a covering index according to bitmasks\n", pProbe->zName, m==0 ? "is" : "is not")); pNew->wsFlags = WHERE_IDX_ONLY | WHERE_INDEXED; } @@ -171170,7 +170123,10 @@ SQLITE_PRIVATE WhereInfo *sqlite3WhereBegin( ** field (type Bitmask) it must be aligned on an 8-byte boundary on ** some architectures. Hence the ROUND8() below. */ - nByteWInfo = SZ_WHEREINFO(nTabList); + nByteWInfo = ROUND8P(sizeof(WhereInfo)); + if( nTabList>1 ){ + nByteWInfo = ROUND8P(nByteWInfo + (nTabList-1)*sizeof(WhereLevel)); + } pWInfo = sqlite3DbMallocRawNN(db, nByteWInfo + sizeof(WhereLoop)); if( db->mallocFailed ){ sqlite3DbFree(db, pWInfo); @@ -171387,8 +170343,7 @@ } /* TUNING: Assume that a DISTINCT clause on a subquery reduces - ** the output size by a factor of 8 (LogEst -30). Search for - ** tag-20250414a to see other cases. + ** the output size by a factor of 8 (LogEst -30). */ if( (pWInfo->wctrlFlags & WHERE_WANT_DISTINCT)!=0 ){ WHERETRACE(0x0080,("nRowOut reduced from %d to %d due to DISTINCT\n", @@ -173123,7 +172078,7 @@ SQLITE_PRIVATE int sqlite3WindowRewrite(Parse *pParse, Select *p){ p->pWhere = 0; p->pGroupBy = 0; p->pHaving = 0; - p->selFlags &= ~(u32)SF_Aggregate; + p->selFlags &= ~SF_Aggregate; p->selFlags |= SF_WinRewrite; /* Create the ORDER BY clause for the sub-select. This is the concatenation @@ -175263,11 +174218,6 @@ SQLITE_PRIVATE void sqlite3WindowCodeStep( /* #include "sqliteInt.h" */ -/* -** Verify that the pParse->isCreate field is set -*/ -#define ASSERT_IS_CREATE assert(pParse->isCreate) - /* ** Disable all error recovery processing in the parser push-down ** automaton.
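** (In the amalgamation this comment introduces the #define of
** YYNOERRORRECOVERY that configures the Lemon-generated parser which
** follows.)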
@@ -175331,10 +174281,6 @@ static void parserSyntaxError(Parse *pParse, Token *p){ static void disableLookaside(Parse *pParse){ sqlite3 *db = pParse->db; pParse->disableLookaside++; -#ifdef SQLITE_DEBUG - pParse->isCreate = 1; -#endif - memset(&pParse->u1.cr, 0, sizeof(pParse->u1.cr)); DisableLookaside; } @@ -178971,9 +177917,7 @@ static YYACTIONTYPE yy_reduce( } break; case 14: /* createkw ::= CREATE */ -{ - disableLookaside(pParse); -} +{disableLookaside(pParse);} break; case 15: /* ifnotexists ::= */ case 18: /* temp ::= */ yytestcase(yyruleno==18); @@ -179065,7 +178009,7 @@ static YYACTIONTYPE yy_reduce( break; case 32: /* ccons ::= CONSTRAINT nm */ case 67: /* tcons ::= CONSTRAINT nm */ yytestcase(yyruleno==67); -{ASSERT_IS_CREATE; pParse->u1.cr.constraintName = yymsp[0].minor.yy0;} +{pParse->constraintName = yymsp[0].minor.yy0;} break; case 33: /* ccons ::= DEFAULT scantok term */ {sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy590,yymsp[-1].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} @@ -179175,7 +178119,7 @@ static YYACTIONTYPE yy_reduce( {yymsp[-1].minor.yy502 = 0;} break; case 66: /* tconscomma ::= COMMA */ -{ASSERT_IS_CREATE; pParse->u1.cr.constraintName.n = 0;} +{pParse->constraintName.n = 0;} break; case 68: /* tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ {sqlite3AddPrimaryKey(pParse,yymsp[-3].minor.yy402,yymsp[0].minor.yy502,yymsp[-2].minor.yy502,0);} @@ -179262,8 +178206,8 @@ static YYACTIONTYPE yy_reduce( if( pRhs ){ pRhs->op = (u8)yymsp[-1].minor.yy502; pRhs->pPrior = pLhs; - if( ALWAYS(pLhs) ) pLhs->selFlags &= ~(u32)SF_MultiValue; - pRhs->selFlags &= ~(u32)SF_MultiValue; + if( ALWAYS(pLhs) ) pLhs->selFlags &= ~SF_MultiValue; + pRhs->selFlags &= ~SF_MultiValue; if( yymsp[-1].minor.yy502!=TK_ALL ) pParse->hasCompound = 1; }else{ sqlite3SelectDelete(pParse->db, pLhs); @@ -179903,21 +178847,12 @@ static YYACTIONTYPE yy_reduce( ** expr1 IN () ** expr1 NOT IN () ** - ** simplify to constants 0 (false) and 1 (true), respectively. - ** - ** Except, do not apply this optimization if expr1 contains a function - ** because that function might be an aggregate (we don't know yet whether - ** it is or not) and if it is an aggregate, that could change the meaning - ** of the whole query. + ** simplify to constants 0 (false) and 1 (true), respectively, + ** regardless of the value of expr1. */ - Expr *pB = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? "true" : "false"); - if( pB ) sqlite3ExprIdToTrueFalse(pB); - if( !ExprHasProperty(yymsp[-4].minor.yy590, EP_HasFunc) ){ - sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); - yymsp[-4].minor.yy590 = pB; - }else{ - yymsp[-4].minor.yy590 = sqlite3PExpr(pParse, yymsp[-3].minor.yy502 ? TK_OR : TK_AND, pB, yymsp[-4].minor.yy590); - } + sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); + yymsp[-4].minor.yy590 = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? 
"true" : "false"); + if( yymsp[-4].minor.yy590 ) sqlite3ExprIdToTrueFalse(yymsp[-4].minor.yy590); }else{ Expr *pRHS = yymsp[-1].minor.yy402->a[0].pExpr; if( yymsp[-1].minor.yy402->nExpr==1 && sqlite3ExprIsConstant(pParse,pRHS) && yymsp[-4].minor.yy590->op!=TK_VECTOR ){ @@ -180077,10 +179012,6 @@ static YYACTIONTYPE yy_reduce( { sqlite3BeginTrigger(pParse, &yymsp[-7].minor.yy0, &yymsp[-6].minor.yy0, yymsp[-5].minor.yy502, yymsp[-4].minor.yy28.a, yymsp[-4].minor.yy28.b, yymsp[-2].minor.yy563, yymsp[0].minor.yy590, yymsp[-10].minor.yy502, yymsp[-8].minor.yy502); yymsp[-10].minor.yy0 = (yymsp[-6].minor.yy0.n==0?yymsp[-7].minor.yy0:yymsp[-6].minor.yy0); /*A-overwrites-T*/ -#ifdef SQLITE_DEBUG - assert( pParse->isCreate ); /* Set by createkw reduce action */ - pParse->isCreate = 0; /* But, should not be set for CREATE TRIGGER */ -#endif } break; case 262: /* trigger_time ::= BEFORE|AFTER */ @@ -181523,7 +180454,7 @@ static int getToken(const unsigned char **pz){ int t; /* Token type to return */ do { z += sqlite3GetToken(z, &t); - }while( t==TK_SPACE || t==TK_COMMENT ); + }while( t==TK_SPACE ); if( t==TK_ID || t==TK_STRING || t==TK_JOIN_KW @@ -182016,11 +180947,7 @@ SQLITE_PRIVATE int sqlite3RunParser(Parse *pParse, const char *zSql){ assert( n==6 ); tokenType = analyzeFilterKeyword((const u8*)&zSql[6], lastTokenParsed); #endif /* SQLITE_OMIT_WINDOWFUNC */ - }else if( tokenType==TK_COMMENT - && (db->init.busy || (db->flags & SQLITE_Comments)!=0) - ){ - /* Ignore SQL comments if either (1) we are reparsing the schema or - ** (2) SQLITE_DBCONFIG_ENABLE_COMMENTS is turned on (the default). */ + }else if( tokenType==TK_COMMENT && (db->flags & SQLITE_Comments)!=0 ){ zSql += n; continue; }else if( tokenType!=TK_QNUMBER ){ @@ -182915,14 +181842,6 @@ SQLITE_API int sqlite3_initialize(void){ if( rc==SQLITE_OK ){ sqlite3PCacheBufferSetup( sqlite3GlobalConfig.pPage, sqlite3GlobalConfig.szPage, sqlite3GlobalConfig.nPage); -#ifdef SQLITE_EXTRA_INIT_MUTEXED - { - int SQLITE_EXTRA_INIT_MUTEXED(const char*); - rc = SQLITE_EXTRA_INIT_MUTEXED(0); - } -#endif - } - if( rc==SQLITE_OK ){ sqlite3MemoryBarrier(); sqlite3GlobalConfig.isInit = 1; #ifdef SQLITE_EXTRA_INIT @@ -183379,22 +182298,17 @@ SQLITE_API int sqlite3_config(int op, ...){ ** If lookaside is already active, return SQLITE_BUSY. ** ** The sz parameter is the number of bytes in each lookaside slot. -** The cnt parameter is the number of slots. If pBuf is NULL the -** space for the lookaside memory is obtained from sqlite3_malloc() -** or similar. If pBuf is not NULL then it is sz*cnt bytes of memory -** to use for the lookaside memory. +** The cnt parameter is the number of slots. If pStart is NULL the +** space for the lookaside memory is obtained from sqlite3_malloc(). +** If pStart is not NULL then it is sz*cnt bytes of memory to use for +** the lookaside memory. */ -static int setupLookaside( - sqlite3 *db, /* Database connection being configured */ - void *pBuf, /* Memory to use for lookaside. 
May be NULL */ - int sz, /* Desired size of each lookaside memory slot */ - int cnt /* Number of slots to allocate */ -){ +static int setupLookaside(sqlite3 *db, void *pBuf, int sz, int cnt){ #ifndef SQLITE_OMIT_LOOKASIDE - void *pStart; /* Start of the lookaside buffer */ - sqlite3_int64 szAlloc; /* Total space set aside for lookaside memory */ - int nBig; /* Number of full-size slots */ - int nSm; /* Number smaller LOOKASIDE_SMALL-byte slots */ + void *pStart; + sqlite3_int64 szAlloc; + int nBig; /* Number of full-size slots */ + int nSm; /* Number smaller LOOKASIDE_SMALL-byte slots */ if( sqlite3LookasideUsed(db,0)>0 ){ return SQLITE_BUSY; @@ -183407,22 +182321,19 @@ static int setupLookaside( sqlite3_free(db->lookaside.pStart); } /* The size of a lookaside slot after ROUNDDOWN8 needs to be larger - ** than a pointer and small enough to fit in a u16. + ** than a pointer to be useful. */ - sz = ROUNDDOWN8(sz); + sz = ROUNDDOWN8(sz); /* IMP: R-33038-09382 */ if( sz<=(int)sizeof(LookasideSlot*) ) sz = 0; if( sz>65528 ) sz = 65528; - /* Count must be at least 1 to be useful, but not so large as to use - ** more than 0x7fff0000 total bytes for lookaside. */ - if( cnt<1 ) cnt = 0; - if( sz>0 && cnt>(0x7fff0000/sz) ) cnt = 0x7fff0000/sz; + if( cnt<0 ) cnt = 0; szAlloc = (i64)sz*(i64)cnt; - if( szAlloc==0 ){ + if( sz==0 || cnt==0 ){ sz = 0; pStart = 0; }else if( pBuf==0 ){ sqlite3BeginBenignMalloc(); - pStart = sqlite3Malloc( szAlloc ); + pStart = sqlite3Malloc( szAlloc ); /* IMP: R-61949-35727 */ sqlite3EndBenignMalloc(); if( pStart ) szAlloc = sqlite3MallocSize(pStart); }else{ @@ -184399,9 +183310,6 @@ SQLITE_API int sqlite3_busy_handler( db->busyHandler.pBusyArg = pArg; db->busyHandler.nBusy = 0; db->busyTimeout = 0; -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - db->setlkTimeout = 0; -#endif sqlite3_mutex_leave(db->mutex); return SQLITE_OK; } @@ -184451,49 +183359,12 @@ SQLITE_API int sqlite3_busy_timeout(sqlite3 *db, int ms){ sqlite3_busy_handler(db, (int(*)(void*,int))sqliteDefaultBusyCallback, (void*)db); db->busyTimeout = ms; -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - db->setlkTimeout = ms; -#endif }else{ sqlite3_busy_handler(db, 0, 0); } return SQLITE_OK; } -/* -** Set the setlk timeout value. -*/ -SQLITE_API int sqlite3_setlk_timeout(sqlite3 *db, int ms, int flags){ -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - int iDb; - int bBOC = ((flags & SQLITE_SETLK_BLOCK_ON_CONNECT) ? 1 : 0); -#endif -#ifdef SQLITE_ENABLE_API_ARMOR - if( !sqlite3SafetyCheckOk(db) ) return SQLITE_MISUSE_BKPT; -#endif - if( ms<-1 ) return SQLITE_RANGE; -#ifdef SQLITE_ENABLE_SETLK_TIMEOUT - sqlite3_mutex_enter(db->mutex); - db->setlkTimeout = ms; - db->setlkFlags = flags; - sqlite3BtreeEnterAll(db); - for(iDb=0; iDb<db->nDb; iDb++){ - Btree *pBt = db->aDb[iDb].pBt; - if( pBt ){ - sqlite3_file *fd = sqlite3PagerFile(sqlite3BtreePager(pBt)); - sqlite3OsFileControlHint(fd, SQLITE_FCNTL_BLOCK_ON_CONNECT, (void*)&bBOC); - } - } - sqlite3BtreeLeaveAll(db); - sqlite3_mutex_leave(db->mutex); -#endif -#if !defined(SQLITE_ENABLE_API_ARMOR) && !defined(SQLITE_ENABLE_SETLK_TIMEOUT) - UNUSED_PARAMETER(db); - UNUSED_PARAMETER(flags); -#endif - return SQLITE_OK; -} - /* ** Cause any pending operation to stop at its earliest opportunity.
*/ @@ -186459,7 +185330,7 @@ SQLITE_API int sqlite3_set_clientdata( return SQLITE_OK; }else{ size_t n = strlen(zName); - p = sqlite3_malloc64( SZ_DBCLIENTDATA(n+1) ); + p = sqlite3_malloc64( sizeof(DbClientData)+n+1 ); if( p==0 ){ if( xDestructor ) xDestructor(pData); sqlite3_mutex_leave(db->mutex); @@ -186613,10 +185484,13 @@ SQLITE_API int sqlite3_table_column_metadata( if( zColumnName==0 ){ /* Query for existence of table only */ }else{ - iCol = sqlite3ColumnIndex(pTab, zColumnName); - if( iCol>=0 ){ + for(iCol=0; iCol<pTab->nCol; iCol++){ pCol = &pTab->aCol[iCol]; - }else{ + if( 0==sqlite3StrICmp(pCol->zCnName, zColumnName) ){ + break; + } + } + if( iCol==pTab->nCol ){ if( HasRowid(pTab) && sqlite3IsRowid(zColumnName) ){ iCol = pTab->iPKey; pCol = iCol>=0 ? &pTab->aCol[iCol] : 0; @@ -186825,8 +185699,8 @@ SQLITE_API int sqlite3_test_control(int op, ...){ /* sqlite3_test_control(SQLITE_TESTCTRL_FK_NO_ACTION, sqlite3 *db, int b); ** ** If b is true, then activate the SQLITE_FkNoAction setting. If b is - ** false then clear that setting. If the SQLITE_FkNoAction setting is - ** enabled, all foreign key ON DELETE and ON UPDATE actions behave as if + ** false then clearn that setting. If the SQLITE_FkNoAction setting is + ** abled, all foreign key ON DELETE and ON UPDATE actions behave as if ** they were NO ACTION, regardless of how they are defined. ** ** NB: One must usually run "PRAGMA writable_schema=RESET" after @@ -188173,7 +187047,7 @@ SQLITE_PRIVATE void sqlite3ConnectionClosed(sqlite3 *db){ ** Here, array { X } means zero or more occurrences of X, adjacent in ** memory. A "position" is an index of a token in the token stream ** generated by the tokenizer. Note that POS_END and POS_COLUMN occur -** in the same logical place as the position element, and act as sentinels +** in the same logical place as the position element, and act as sentinals ** ending a position list array. POS_END is 0. POS_COLUMN is 1. ** The positions numbers are not stored literally but rather as two more ** than the difference from the prior position, or the just the position plus @@ -188392,13 +187266,6 @@ SQLITE_PRIVATE void sqlite3ConnectionClosed(sqlite3 *db){ #ifndef _FTSINT_H #define _FTSINT_H -/* #include */ -/* #include */ -/* #include */ -/* #include */ -/* #include */ -/* #include */ - #if !defined(NDEBUG) && !defined(SQLITE_DEBUG) # define NDEBUG 1 #endif @@ -188868,19 +187735,6 @@ typedef sqlite3_int64 i64; /* 8-byte signed integer */ #define deliberate_fall_through -/* -** Macros needed to provide flexible arrays in a portable way -*/ -#ifndef offsetof -# define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) -#endif -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) -# define FLEXARRAY -#else -# define FLEXARRAY 1 -#endif - - #endif /* SQLITE_AMALGAMATION */ #ifdef SQLITE_DEBUG @@ -188985,7 +187839,7 @@ struct Fts3Table { #endif #if defined(SQLITE_DEBUG) || defined(SQLITE_TEST) - /* True to disable the incremental doclist optimization. This is controlled + /* True to disable the incremental doclist optimization. This is controled ** by special insert command 'test-no-incr-doclist'. */ int bNoIncrDoclist; @@ -189037,7 +187891,7 @@ struct Fts3Cursor { /* ** The Fts3Cursor.eSearch member is always set to one of the following. -** Actually, Fts3Cursor.eSearch can be greater than or equal to +** Actualy, Fts3Cursor.eSearch can be greater than or equal to ** FTS3_FULLTEXT_SEARCH. If so, then Fts3Cursor.eSearch - 2 is the index ** of the column to be searched.
For example, in ** @@ -189110,13 +187964,9 @@ struct Fts3Phrase { */ int nToken; /* Number of tokens in the phrase */ int iColumn; /* Index of column this phrase must match */ - Fts3PhraseToken aToken[FLEXARRAY]; /* One for each token in the phrase */ + Fts3PhraseToken aToken[1]; /* One entry for each token in the phrase */ }; -/* Size (in bytes) of an Fts3Phrase object large enough to hold N tokens */ -#define SZ_FTS3PHRASE(N) \ - (offsetof(Fts3Phrase,aToken)+(N)*sizeof(Fts3PhraseToken)) - /* ** A tree of these objects forms the RHS of a MATCH operator. ** @@ -189350,6 +188200,12 @@ SQLITE_PRIVATE int sqlite3Fts3IntegrityCheck(Fts3Table *p, int *pbOk); # define SQLITE_CORE 1 #endif +/* #include */ +/* #include */ +/* #include */ +/* #include */ +/* #include */ +/* #include */ /* #include "fts3.h" */ #ifndef SQLITE_CORE @@ -191688,7 +190544,7 @@ static int fts3DoclistOrMerge( ** sizes of the two inputs, plus enough space for exactly one of the input ** docids to grow. ** - ** A symmetric argument may be made if the doclists are in descending + ** A symetric argument may be made if the doclists are in descending ** order. */ aOut = sqlite3_malloc64((i64)n1+n2+FTS3_VARINT_MAX-1+FTS3_BUFFER_PADDING); @@ -193487,7 +192343,7 @@ static int fts3EvalDeferredPhrase(Fts3Cursor *pCsr, Fts3Phrase *pPhrase){ nDistance = iPrev - nMaxUndeferred; } - aOut = (char *)sqlite3Fts3MallocZero(((i64)nPoslist)+FTS3_BUFFER_PADDING); + aOut = (char *)sqlite3Fts3MallocZero(nPoslist+FTS3_BUFFER_PADDING); if( !aOut ){ sqlite3_free(aPoslist); return SQLITE_NOMEM; @@ -193786,7 +192642,7 @@ static int incrPhraseTokenNext( ** ** * does not contain any deferred tokens. ** -** Advance it to the next matching document in the database and populate +** Advance it to the next matching documnent in the database and populate ** the Fts3Doclist.pList and nList fields. ** ** If there is no "next" entry and no error occurs, then *pbEof is set to @@ -194793,7 +193649,7 @@ static int fts3EvalNext(Fts3Cursor *pCsr){ } /* -** Restart iteration for expression pExpr so that the next call to +** Restart interation for expression pExpr so that the next call to ** fts3EvalNext() visits the first row. Do not allow incremental ** loading or merging of phrase doclists for this iteration. ** @@ -195985,23 +194841,6 @@ SQLITE_PRIVATE int sqlite3Fts3OpenTokenizer( */ static int fts3ExprParse(ParseContext *, const char *, int, Fts3Expr **, int *); -/* -** Search buffer z[], size n, for a '"' character. Or, if enable_parenthesis -** is defined, search for '(' and ')' as well. Return the index of the first -** such character in the buffer. If there is no such character, return -1. -*/ -static int findBarredChar(const char *z, int n){ - int ii; - for(ii=0; iiiLangid, z, n, &pCursor); + /* Set variable i to the maximum number of bytes of input to tokenize. */ + for(i=0; iiLangid, z, i, &pCursor); if( rc==SQLITE_OK ){ const char *zToken; int nToken = 0, iStart = 0, iEnd = 0, iPosition = 0; @@ -196036,18 +194882,7 @@ static int getNextToken( rc = pModule->xNext(pCursor, &zToken, &nToken, &iStart, &iEnd, &iPosition); if( rc==SQLITE_OK ){ - /* Check that this tokenization did not gobble up any " characters. Or, - ** if enable_parenthesis is true, that it did not gobble up any - ** open or close parenthesis characters either. If it did, call - ** getNextToken() again, but pass only that part of the input buffer - ** up to the first such character. 
*/ - int iBarred = findBarredChar(z, iEnd); - if( iBarred>=0 ){ - pModule->xClose(pCursor); - return getNextToken(pParse, iCol, z, iBarred, ppExpr, pnConsumed); - } - - nByte = sizeof(Fts3Expr) + SZ_FTS3PHRASE(1) + nToken; + nByte = sizeof(Fts3Expr) + sizeof(Fts3Phrase) + nToken; pRet = (Fts3Expr *)sqlite3Fts3MallocZero(nByte); if( !pRet ){ rc = SQLITE_NOMEM; @@ -196057,7 +194892,7 @@ static int getNextToken( pRet->pPhrase->nToken = 1; pRet->pPhrase->iColumn = iCol; pRet->pPhrase->aToken[0].n = nToken; - pRet->pPhrase->aToken[0].z = (char*)&pRet->pPhrase->aToken[1]; + pRet->pPhrase->aToken[0].z = (char *)&pRet->pPhrase[1]; memcpy(pRet->pPhrase->aToken[0].z, zToken, nToken); if( iEnd=0 ){ - *pnConsumed = iBarred; - } + }else if( i && rc==SQLITE_DONE ){ rc = SQLITE_OK; } @@ -196132,9 +194963,9 @@ static int getNextString( Fts3Expr *p = 0; sqlite3_tokenizer_cursor *pCursor = 0; char *zTemp = 0; - i64 nTemp = 0; + int nTemp = 0; - const int nSpace = sizeof(Fts3Expr) + SZ_FTS3PHRASE(1); + const int nSpace = sizeof(Fts3Expr) + sizeof(Fts3Phrase); int nToken = 0; /* The final Fts3Expr data structure, including the Fts3Phrase, @@ -196506,7 +195337,7 @@ static int fts3ExprParse( /* The isRequirePhrase variable is set to true if a phrase or ** an expression contained in parenthesis is required. If a - ** binary operator (AND, OR, NOT or NEAR) is encountered when + ** binary operator (AND, OR, NOT or NEAR) is encounted when ** isRequirePhrase is set, this is a syntax error. */ if( !isPhrase && isRequirePhrase ){ @@ -197088,6 +195919,7 @@ static void fts3ExprTestCommon( } if( rc!=SQLITE_OK && rc!=SQLITE_NOMEM ){ + sqlite3Fts3ExprFree(pExpr); sqlite3_result_error(context, "Error parsing expression", -1); }else if( rc==SQLITE_NOMEM || !(zBuf = exprToString(pExpr, 0)) ){ sqlite3_result_error_nomem(context); @@ -197330,7 +196162,7 @@ static void fts3HashInsertElement( } -/* Resize the hash table so that it contains "new_size" buckets. +/* Resize the hash table so that it cantains "new_size" buckets. ** "new_size" must be a power of 2. The hash table might fail ** to resize if sqliteMalloc() fails. ** @@ -197785,7 +196617,7 @@ static int star_oh(const char *z){ /* ** If the word ends with zFrom and xCond() is true for the stem -** of the word that precedes the zFrom ending, then change the +** of the word that preceeds the zFrom ending, then change the ** ending to zTo. ** ** The input word *pz and zFrom are both in reverse order. zTo @@ -199296,7 +198128,7 @@ static int fts3tokFilterMethod( fts3tokResetCursor(pCsr); if( idxNum==1 ){ const char *zByte = (const char *)sqlite3_value_text(apVal[0]); - sqlite3_int64 nByte = sqlite3_value_bytes(apVal[0]); + int nByte = sqlite3_value_bytes(apVal[0]); pCsr->zInput = sqlite3_malloc64(nByte+1); if( pCsr->zInput==0 ){ rc = SQLITE_NOMEM; @@ -203368,7 +202200,7 @@ static int fts3IncrmergePush( ** ** It is assumed that the buffer associated with pNode is already large ** enough to accommodate the new entry. The buffer associated with pPrev -** is extended by this function if required. +** is extended by this function if requrired. ** ** If an error (i.e. OOM condition) occurs, an SQLite error code is ** returned. Otherwise, SQLITE_OK. @@ -205031,7 +203863,7 @@ SQLITE_PRIVATE int sqlite3Fts3DeferToken( /* ** SQLite value pRowid contains the rowid of a row that may or may not be ** present in the FTS3 table. If it is, delete it and adjust the contents -** of subsidiary data structures accordingly. +** of subsiduary data structures accordingly. 
*/ static int fts3DeleteByRowid( Fts3Table *p, @@ -205357,13 +204189,9 @@ struct MatchinfoBuffer { int nElem; int bGlobal; /* Set if global data is loaded */ char *zMatchinfo; - u32 aMI[FLEXARRAY]; + u32 aMatchinfo[1]; }; -/* Size (in bytes) of a MatchinfoBuffer sufficient for N elements */ -#define SZ_MATCHINFOBUFFER(N) \ - (offsetof(MatchinfoBuffer,aMI)+(((N)+1)/2)*sizeof(u64)) - /* ** The snippet() and offsets() functions both return text values. An instance @@ -205388,13 +204216,13 @@ struct StrBuffer { static MatchinfoBuffer *fts3MIBufferNew(size_t nElem, const char *zMatchinfo){ MatchinfoBuffer *pRet; sqlite3_int64 nByte = sizeof(u32) * (2*(sqlite3_int64)nElem + 1) - + SZ_MATCHINFOBUFFER(1); + + sizeof(MatchinfoBuffer); sqlite3_int64 nStr = strlen(zMatchinfo); pRet = sqlite3Fts3MallocZero(nByte + nStr+1); if( pRet ){ - pRet->aMI[0] = (u8*)(&pRet->aMI[1]) - (u8*)pRet; - pRet->aMI[1+nElem] = pRet->aMI[0] + pRet->aMatchinfo[0] = (u8*)(&pRet->aMatchinfo[1]) - (u8*)pRet; + pRet->aMatchinfo[1+nElem] = pRet->aMatchinfo[0] + sizeof(u32)*((int)nElem+1); pRet->nElem = (int)nElem; pRet->zMatchinfo = ((char*)pRet) + nByte; @@ -205408,10 +204236,10 @@ static MatchinfoBuffer *fts3MIBufferNew(size_t nElem, const char *zMatchinfo){ static void fts3MIBufferFree(void *p){ MatchinfoBuffer *pBuf = (MatchinfoBuffer*)((u8*)p - ((u32*)p)[-1]); - assert( (u32*)p==&pBuf->aMI[1] - || (u32*)p==&pBuf->aMI[pBuf->nElem+2] + assert( (u32*)p==&pBuf->aMatchinfo[1] + || (u32*)p==&pBuf->aMatchinfo[pBuf->nElem+2] ); - if( (u32*)p==&pBuf->aMI[1] ){ + if( (u32*)p==&pBuf->aMatchinfo[1] ){ pBuf->aRef[1] = 0; }else{ pBuf->aRef[2] = 0; @@ -205428,18 +204256,18 @@ static void (*fts3MIBufferAlloc(MatchinfoBuffer *p, u32 **paOut))(void*){ if( p->aRef[1]==0 ){ p->aRef[1] = 1; - aOut = &p->aMI[1]; + aOut = &p->aMatchinfo[1]; xRet = fts3MIBufferFree; } else if( p->aRef[2]==0 ){ p->aRef[2] = 1; - aOut = &p->aMI[p->nElem+2]; + aOut = &p->aMatchinfo[p->nElem+2]; xRet = fts3MIBufferFree; }else{ aOut = (u32*)sqlite3_malloc64(p->nElem * sizeof(u32)); if( aOut ){ xRet = sqlite3_free; - if( p->bGlobal ) memcpy(aOut, &p->aMI[1], p->nElem*sizeof(u32)); + if( p->bGlobal ) memcpy(aOut, &p->aMatchinfo[1], p->nElem*sizeof(u32)); } } @@ -205449,7 +204277,7 @@ static void (*fts3MIBufferAlloc(MatchinfoBuffer *p, u32 **paOut))(void*){ static void fts3MIBufferSetGlobal(MatchinfoBuffer *p){ p->bGlobal = 1; - memcpy(&p->aMI[2+p->nElem], &p->aMI[1], p->nElem*sizeof(u32)); + memcpy(&p->aMatchinfo[2+p->nElem], &p->aMatchinfo[1], p->nElem*sizeof(u32)); } /* @@ -205864,7 +204692,7 @@ static int fts3StringAppend( } /* If there is insufficient space allocated at StrBuffer.z, use realloc() - ** to grow the buffer until so that it is big enough to accommodate the + ** to grow the buffer until so that it is big enough to accomadate the ** appended data. 
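The fts3MIBufferNew()/fts3MIBufferFree() hunks earlier in this region lean on a back-pointer-by-offset trick: the u32 slot immediately before each pointer handed out stores that pointer's byte offset from the start of the enclosing MatchinfoBuffer, so the destructor can recover the allocation from the bare pointer. A self-contained sketch of the same pattern, with hypothetical names:

    #include <stdlib.h>
    typedef unsigned int  u32;
    typedef unsigned char u8;

    /* Hand out &a[1], stashing in a[0] the offset back to the allocation. */
    static u32 *offsetAlloc(int nElem){
      u32 *a = (u32*)malloc((nElem+1)*sizeof(u32));
      if( a==0 ) return 0;
      a[0] = sizeof(u32);            /* byte offset of a[1] from a */
      return &a[1];                  /* caller never sees a[0] */
    }
    /* Same recovery step as fts3MIBufferFree() in the hunk above. */
    static void offsetFree(u32 *p){
      if( p ) free((u8*)p - ((u32*)p)[-1]);
    }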
*/ if( pStr->n+nAppend+1>=pStr->nAlloc ){ @@ -206276,16 +205104,16 @@ static size_t fts3MatchinfoSize(MatchInfo *pInfo, char cArg){ break; case FTS3_MATCHINFO_LHITS: - nVal = (size_t)pInfo->nCol * pInfo->nPhrase; + nVal = pInfo->nCol * pInfo->nPhrase; break; case FTS3_MATCHINFO_LHITS_BM: - nVal = (size_t)pInfo->nPhrase * ((pInfo->nCol + 31) / 32); + nVal = pInfo->nPhrase * ((pInfo->nCol + 31) / 32); break; default: assert( cArg==FTS3_MATCHINFO_HITS ); - nVal = (size_t)pInfo->nCol * pInfo->nPhrase * 3; + nVal = pInfo->nCol * pInfo->nPhrase * 3; break; } @@ -207843,8 +206671,8 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** Beginning with version 3.45.0 (circa 2024-01-01), these routines also ** accept BLOB values that have JSON encoded using a binary representation ** called "JSONB". The name JSONB comes from PostgreSQL, however the on-disk -** format for SQLite-JSONB is completely different and incompatible with -** PostgreSQL-JSONB. +** format SQLite JSONB is completely different and incompatible with +** PostgreSQL JSONB. ** ** Decoding and interpreting JSONB is still O(N) where N is the size of ** the input, the same as text JSON. However, the constant of proportionality @@ -207901,7 +206729,7 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** ** The payload size need not be expressed in its minimal form. For example, ** if the payload size is 10, the size can be expressed in any of 5 different -** ways: (1) (X>>4)==10, (2) (X>>4)==12 following by one 0x0a byte, +** ways: (1) (X>>4)==10, (2) (X>>4)==12 following by on 0x0a byte, ** (3) (X>>4)==13 followed by 0x00 and 0x0a, (4) (X>>4)==14 followed by ** 0x00 0x00 0x00 0x0a, or (5) (X>>4)==15 followed by 7 bytes of 0x00 and ** a single byte of 0x0a. The shorter forms are preferred, of course, but @@ -207911,7 +206739,7 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** the size when it becomes known, resulting in a non-minimal encoding. ** ** The value (X>>4)==15 is not actually used in the current implementation -** (as SQLite is currently unable to handle BLOBs larger than about 2GB) +** (as SQLite is currently unable handle BLOBs larger than about 2GB) ** but is included in the design to allow for future enhancements. ** ** The payload follows the header. NULL, TRUE, and FALSE have no payload and @@ -207971,47 +206799,23 @@ static const char * const jsonbType[] = { ** increase for the text-JSON parser. (Ubuntu14.10 gcc 4.8.4 x64 with -Os). 
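A decoding sketch for the JSONB header format described in the comments above; it is derived from the comment text only, not the actual SQLite routine. The low nibble of the first byte is the element type, and the high nibble X encodes the payload size:

    #include <stdint.h>

    /* Decode one JSONB element header; returns the header length in
    ** bytes and writes the payload size to *pSz. */
    static int jsonbHdrDecode(const uint8_t *a, uint64_t *pSz){
      unsigned x = a[0] >> 4;
      if( x<=11 ){ *pSz = x; return 1; }       /* size fits in the nibble */
      if( x==12 ){ *pSz = a[1]; return 2; }    /* one size byte follows   */
      if( x==13 ){ *pSz = ((uint64_t)a[1]<<8) | a[2]; return 3; }
      if( x==14 ){                             /* four size bytes follow  */
        *pSz = ((uint64_t)a[1]<<24)|((uint64_t)a[2]<<16)|(a[3]<<8)|a[4];
        return 5;
      }
      *pSz = 0;                                /* x==15: eight size bytes, */
      for(int i=1; i<=8; i++){ *pSz = (*pSz<<8) | a[i]; } /* reserved      */
      return 9;
    }
    /* A payload size of 10 therefore decodes identically from 0xA?,
    ** 0xC? 0x0A, 0xD? 0x00 0x0A, 0xE? plus four size bytes, or
    ** 0xF? plus eight size bytes, where ? is the type nibble. */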
*/ static const char jsonIsSpace[] = { -#ifdef SQLITE_ASCII -/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, /* 0 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 3 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 4 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 5 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 6 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 7 */ - - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 8 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 9 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* a */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* b */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* c */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* d */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* e */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* f */ -#endif -#ifdef SQLITE_EBCDIC -/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ - 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, /* 0 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ - 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 3 */ - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 4 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 5 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 6 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 7 */ - - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 8 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 9 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* a */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* b */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* c */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* d */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* e */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* f */ -#endif + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; #define jsonIsspace(x) (jsonIsSpace[(unsigned char)x]) @@ -208019,13 +206823,7 @@ static const char jsonIsSpace[] = { ** The set of all space characters recognized by jsonIsspace(). ** Useful as the second argument to strspn(). */ -#ifdef SQLITE_ASCII static const char jsonSpaces[] = "\011\012\015\040"; -#endif -#ifdef SQLITE_EBCDIC -static const char jsonSpaces[] = "\005\045\015\100"; -#endif - /* ** Characters that are special to JSON. Control characters, @@ -208034,46 +206832,23 @@ static const char jsonSpaces[] = "\005\045\015\100"; ** it in the set of special characters. 
*/ static const char jsonIsOk[256] = { -#ifdef SQLITE_ASCII -/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 0 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ - 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, /* 2 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 3 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 4 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, /* 5 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 6 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 7 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 8 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 9 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* a */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* b */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* c */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* d */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* e */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 /* f */ -#endif -#ifdef SQLITE_EBCDIC -/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 0 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ - 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, /* 3 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 4 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 5 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 6 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, /* 7 */ - - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 8 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 9 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* a */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* b */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* c */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* d */ - 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* e */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 /* f */ -#endif + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; /* Objects */ @@ -208218,7 +206993,7 @@ struct JsonParse { ** Forward references **************************************************************************/ static void jsonReturnStringAsBlob(JsonString*); -static int jsonArgIsJsonb(sqlite3_value *pJson, JsonParse *p); +static int jsonFuncArgMightBeBinary(sqlite3_value *pJson); static u32 jsonTranslateBlobToText(const JsonParse*,u32,JsonString*); static void jsonReturnParse(sqlite3_context*,JsonParse*); static JsonParse *jsonParseFuncArg(sqlite3_context*,sqlite3_value*,u32); @@ -208292,7 +207067,7 @@ static int jsonCacheInsert( ** most-recently used entry if it isn't so already. 
** ** The JsonParse object returned still belongs to the Cache and might -** be deleted at any moment. If the caller wants the JsonParse to +** be deleted at any moment. If the caller whants the JsonParse to ** linger, it needs to increment the nPJRef reference counter. */ static JsonParse *jsonCacheSearch( @@ -208636,9 +207411,11 @@ static void jsonAppendSqlValue( break; } default: { - JsonParse px; - memset(&px, 0, sizeof(px)); - if( jsonArgIsJsonb(pValue, &px) ){ + if( jsonFuncArgMightBeBinary(pValue) ){ + JsonParse px; + memset(&px, 0, sizeof(px)); + px.aBlob = (u8*)sqlite3_value_blob(pValue); + px.nBlob = sqlite3_value_bytes(pValue); jsonTranslateBlobToText(&px, 0, p); }else if( p->eErr==0 ){ sqlite3_result_error(p->pCtx, "JSON cannot hold BLOB values", -1); @@ -208957,7 +207734,7 @@ static void jsonWrongNumArgs( */ static int jsonBlobExpand(JsonParse *pParse, u32 N){ u8 *aNew; - u64 t; + u32 t; assert( N>pParse->nBlobAlloc ); if( pParse->nBlobAlloc==0 ){ t = 100; @@ -208967,9 +207744,8 @@ static int jsonBlobExpand(JsonParse *pParse, u32 N){ if( tdb, pParse->aBlob, t); if( aNew==0 ){ pParse->oom = 1; return 1; } - assert( t<0x7fffffff ); pParse->aBlob = aNew; - pParse->nBlobAlloc = (u32)t; + pParse->nBlobAlloc = t; return 0; } @@ -209036,7 +207812,7 @@ static SQLITE_NOINLINE void jsonBlobExpandAndAppendNode( } -/* Append a node type byte together with the payload size and +/* Append an node type byte together with the payload size and ** possibly also the payload. ** ** If aPayload is not NULL, then it is a pointer to the payload which @@ -209105,10 +207881,8 @@ static int jsonBlobChangePayloadSize( nExtra = 1; }else if( szType==13 ){ nExtra = 2; - }else if( szType==14 ){ - nExtra = 4; }else{ - nExtra = 8; + nExtra = 4; } if( szPayload<=11 ){ nNeeded = 0; @@ -209578,12 +208352,7 @@ json_parse_restart: || c=='n' || c=='r' || c=='t' || (c=='u' && jsonIs4Hex(&z[j+1])) ){ if( opcode==JSONB_TEXT ) opcode = JSONB_TEXTJ; - }else if( c=='\'' || c=='v' || c=='\n' -#ifdef SQLITE_BUG_COMPATIBLE_20250510 - || (c=='0') /* Legacy bug compatible */ -#else - || (c=='0' && !sqlite3Isdigit(z[j+1])) /* Correct implementation */ -#endif + }else if( c=='\'' || c=='0' || c=='v' || c=='\n' || (0xe2==(u8)c && 0x80==(u8)z[j+1] && (0xa8==(u8)z[j+2] || 0xa9==(u8)z[j+2])) || (c=='x' && jsonIs2Hex(&z[j+1])) ){ @@ -209933,7 +208702,10 @@ static u32 jsonbPayloadSize(const JsonParse *pParse, u32 i, u32 *pSz){ u8 x; u32 sz; u32 n; - assert( i<=pParse->nBlob ); + if( NEVER(i>pParse->nBlob) ){ + *pSz = 0; + return 0; + } x = pParse->aBlob[i]>>4; if( x<=11 ){ sz = x; @@ -209970,15 +208742,15 @@ static u32 jsonbPayloadSize(const JsonParse *pParse, u32 i, u32 *pSz){ *pSz = 0; return 0; } - sz = ((u32)pParse->aBlob[i+5]<<24) + (pParse->aBlob[i+6]<<16) + + sz = (pParse->aBlob[i+5]<<24) + (pParse->aBlob[i+6]<<16) + (pParse->aBlob[i+7]<<8) + pParse->aBlob[i+8]; n = 9; } if( (i64)i+sz+n > pParse->nBlob && (i64)i+sz+n > pParse->nBlob-pParse->delta ){ - *pSz = 0; - return 0; + sz = 0; + n = 0; } *pSz = sz; return n; @@ -210075,12 +208847,9 @@ static u32 jsonTranslateBlobToText( } case JSONB_TEXT: case JSONB_TEXTJ: { - if( pOut->nUsed+sz+2<=pOut->nAlloc || jsonStringGrow(pOut, sz+2)==0 ){ - pOut->zBuf[pOut->nUsed] = '"'; - memcpy(pOut->zBuf+pOut->nUsed+1,(const char*)&pParse->aBlob[i+n],sz); - pOut->zBuf[pOut->nUsed+sz+1] = '"'; - pOut->nUsed += sz+2; - } + jsonAppendChar(pOut, '"'); + jsonAppendRaw(pOut, (const char*)&pParse->aBlob[i+n], sz); + jsonAppendChar(pOut, '"'); break; } case JSONB_TEXT5: { @@ -210319,6 +209088,33 @@ 
static u32 jsonTranslateBlobToPrettyText( return i; } + +/* Return true if the input pJson +** +** For performance reasons, this routine does not do a detailed check of the +** input BLOB to ensure that it is well-formed. Hence, false positives are +** possible. False negatives should never occur, however. +*/ +static int jsonFuncArgMightBeBinary(sqlite3_value *pJson){ + u32 sz, n; + const u8 *aBlob; + int nBlob; + JsonParse s; + if( sqlite3_value_type(pJson)!=SQLITE_BLOB ) return 0; + aBlob = sqlite3_value_blob(pJson); + nBlob = sqlite3_value_bytes(pJson); + if( nBlob<1 ) return 0; + if( NEVER(aBlob==0) || (aBlob[0] & 0x0f)>JSONB_OBJECT ) return 0; + memset(&s, 0, sizeof(s)); + s.aBlob = (u8*)aBlob; + s.nBlob = nBlob; + n = jsonbPayloadSize(&s, 0, &sz); + if( n==0 ) return 0; + if( sz+n!=(u32)nBlob ) return 0; + if( (aBlob[0] & 0x0f)<=JSONB_FALSE && sz>0 ) return 0; + return sz+n==(u32)nBlob; +} + /* ** Given that a JSONB_ARRAY object starts at offset i, return ** the number of entries in that array. @@ -210351,82 +209147,6 @@ static void jsonAfterEditSizeAdjust(JsonParse *pParse, u32 iRoot){ pParse->delta += jsonBlobChangePayloadSize(pParse, iRoot, sz); } -/* -** If the JSONB at aIns[0..nIns-1] can be expanded (by denormalizing the -** size field) by d bytes, then write the expansion into aOut[] and -** return true. In this way, an overwrite happens without changing the -** size of the JSONB, which reduces memcpy() operations and also make it -** faster and easier to update the B-Tree entry that contains the JSONB -** in the database. -** -** If the expansion of aIns[] by d bytes cannot be (easily) accomplished -** then return false. -** -** The d parameter is guaranteed to be between 1 and 8. -** -** This routine is an optimization. A correct answer is obtained if it -** always leaves the output unchanged and returns false. 
-*/ -static int jsonBlobOverwrite( - u8 *aOut, /* Overwrite here */ - const u8 *aIns, /* New content */ - u32 nIns, /* Bytes of new content */ - u32 d /* Need to expand new content by this much */ -){ - u32 szPayload; /* Bytes of payload */ - u32 i; /* New header size, after expansion & a loop counter */ - u8 szHdr; /* Size of header before expansion */ - - /* Lookup table for finding the upper 4 bits of the first byte of the - ** expanded aIns[], based on the size of the expanded aIns[] header: - ** - ** 2 3 4 5 6 7 8 9 */ - static const u8 aType[] = { 0xc0, 0xd0, 0, 0xe0, 0, 0, 0, 0xf0 }; - - if( (aIns[0]&0x0f)<=2 ) return 0; /* Cannot enlarge NULL, true, false */ - switch( aIns[0]>>4 ){ - default: { /* aIns[] header size 1 */ - if( ((1<=2 && i<=9 && aType[i-2]!=0 ); - aOut[0] = (aIns[0] & 0x0f) | aType[i-2]; - memcpy(&aOut[i], &aIns[szHdr], nIns-szHdr); - szPayload = nIns - szHdr; - while( 1/*edit-by-break*/ ){ - i--; - aOut[i] = szPayload & 0xff; - if( i==1 ) break; - szPayload >>= 8; - } - assert( (szPayload>>8)==0 ); - return 1; -} - /* ** Modify the JSONB blob at pParse->aBlob by removing nDel bytes of ** content beginning at iDel, and replacing them with nIns bytes of @@ -210448,11 +209168,6 @@ static void jsonBlobEdit( u32 nIns /* Bytes of content to insert */ ){ i64 d = (i64)nIns - (i64)nDel; - if( d<0 && d>=(-8) && aIns!=0 - && jsonBlobOverwrite(&pParse->aBlob[iDel], aIns, nIns, (int)-d) - ){ - return; - } if( d!=0 ){ if( pParse->nBlob + d > pParse->nBlobAlloc ){ jsonBlobExpand(pParse, pParse->nBlob+d); @@ -210464,9 +209179,7 @@ static void jsonBlobEdit( pParse->nBlob += d; pParse->delta += d; } - if( nIns && aIns ){ - memcpy(&pParse->aBlob[iDel], aIns, nIns); - } + if( nIns && aIns ) memcpy(&pParse->aBlob[iDel], aIns, nIns); } /* @@ -210551,21 +209264,7 @@ static u32 jsonUnescapeOneChar(const char *z, u32 n, u32 *piOut){ case 'r': { *piOut = '\r'; return 2; } case 't': { *piOut = '\t'; return 2; } case 'v': { *piOut = '\v'; return 2; } - case '0': { - /* JSON5 requires that the \0 escape not be followed by a digit. - ** But SQLite did not enforce this restriction in versions 3.42.0 - ** through 3.49.2. That was a bug. But some applications might have - ** come to depend on that bug. Use the SQLITE_BUG_COMPATIBLE_20250510 - ** option to restore the old buggy behavior. */ -#ifdef SQLITE_BUG_COMPATIBLE_20250510 - /* Legacy bug-compatible behavior */ - *piOut = 0; -#else - /* Correct behavior */ - *piOut = (n>2 && sqlite3Isdigit(z[2])) ? JSON_INVALID_CHAR : 0; -#endif - return 2; - } + case '0': { *piOut = 0; return 2; } case '\'': case '"': case '/': @@ -211065,7 +209764,7 @@ static void jsonReturnFromBlob( char *zOut; u32 nOut = sz; z = (const char*)&pParse->aBlob[i+n]; - zOut = sqlite3DbMallocRaw(db, ((u64)nOut)+1); + zOut = sqlite3DbMallocRaw(db, nOut+1); if( zOut==0 ) goto returnfromblob_oom; for(iIn=iOut=0; iInaBlob = (u8*)sqlite3_value_blob(pArg); + pParse->nBlob = sqlite3_value_bytes(pArg); + }else{ sqlite3_result_error(ctx, "JSON cannot hold BLOB values", -1); return 1; } @@ -211240,7 +209942,7 @@ static char *jsonBadPathError( } /* argv[0] is a BLOB that seems likely to be a JSONB. Subsequent -** arguments come in pairs where each pair contains a JSON path and +** arguments come in parse where each pair contains a JSON path and ** content to insert or set at that patch. Do the updates ** and return the result. 
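Worked example for the jsonBlobOverwrite() optimization removed above, which exploited the non-minimal size encodings to overwrite an element in place. Illustrative bytes, taking type nibble 0x7 for a JSONB text element with a 5-byte payload "p p p p p": the same element can be written with a 1-, 2- or 3-byte header, so replacement content shorter by d bytes (the removed code handled 1..8) can be padded back to the old total size by denormalizing its header, avoiding a memmove() of everything after the edit:

    /*   0x57 p p p p p              1-byte header, size in high nibble
    **   0xC7 0x05 p p p p p         2-byte header, one size byte
    **   0xD7 0x00 0x05 p p p p p    3-byte header, two size bytes
    */

With the function removed, every size-changing edit falls through to the general jsonBlobEdit() path below, which shifts the tail of the blob instead.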
** @@ -211311,46 +210013,27 @@ jsonInsertIntoBlob_patherror: /* ** If pArg is a blob that seems like a JSONB blob, then initialize ** p to point to that JSONB and return TRUE. If pArg does not seem like -** a JSONB blob, then return FALSE. +** a JSONB blob, then return FALSE; ** -** For small BLOBs (having no more than 7 bytes of payload) a full -** validity check is done. So for small BLOBs this routine only returns -** true if the value is guaranteed to be a valid JSONB. For larger BLOBs -** (8 byte or more of payload) only the size of the outermost element is -** checked to verify that the BLOB is superficially valid JSONB. -** -** A full JSONB validation is done on smaller BLOBs because those BLOBs might -** also be text JSON that has been incorrectly cast into a BLOB. -** (See tag-20240123-a and https://sqlite.org/forum/forumpost/012136abd5) -** If the BLOB is 9 bytes are larger, then it is not possible for the -** superficial size check done here to pass if the input is really text -** JSON so we do not need to look deeper in that case. -** -** Why we only need to do full JSONB validation for smaller BLOBs: -** -** The first byte of valid JSON text must be one of: '{', '[', '"', ' ', '\n', -** '\r', '\t', '-', or a digit '0' through '9'. Of these, only a subset -** can also be the first byte of JSONB: '{', '[', and digits '3' -** through '9'. In every one of those cases, the payload size is 7 bytes -** or less. So if we do full JSONB validation for every BLOB where the -** payload is less than 7 bytes, we will never get a false positive for -** JSONB on an input that is really text JSON. +** This routine is only called if it is already known that pArg is a +** blob. The only open question is whether or not the blob appears +** to be a JSONB blob. */ static int jsonArgIsJsonb(sqlite3_value *pArg, JsonParse *p){ u32 n, sz = 0; - u8 c; - if( sqlite3_value_type(pArg)!=SQLITE_BLOB ) return 0; p->aBlob = (u8*)sqlite3_value_blob(pArg); p->nBlob = (u32)sqlite3_value_bytes(pArg); - if( p->nBlob>0 - && ALWAYS(p->aBlob!=0) - && ((c = p->aBlob[0]) & 0x0f)<=JSONB_OBJECT + if( p->nBlob==0 ){ + p->aBlob = 0; + return 0; + } + if( NEVER(p->aBlob==0) ){ + return 0; + } + if( (p->aBlob[0] & 0x0f)<=JSONB_OBJECT && (n = jsonbPayloadSize(p, 0, &sz))>0 && sz+n==p->nBlob - && ((c & 0x0f)>JSONB_FALSE || sz==0) - && (sz>7 - || (c!=0x7b && c!=0x5b && !sqlite3Isdigit(c)) - || jsonbValidityCheck(p, 0, p->nBlob, 1)==0) + && ((p->aBlob[0] & 0x0f)>JSONB_FALSE || sz==0) ){ return 1; } @@ -211428,7 +210111,7 @@ rebuild_from_cache: ** JSON functions were suppose to work. From the beginning, blob was ** reserved for expansion and a blob value should have raised an error. ** But it did not, due to a bug. And many applications came to depend - ** upon this buggy behavior, especially when using the CLI and reading + ** upon this buggy behavior, espeically when using the CLI and reading ** JSON text using readfile(), which returns a blob. For this reason ** we will continue to support the bug moving forward. ** See for example https://sqlite.org/forum/forumpost/012136abd5292b8d @@ -212443,17 +211126,21 @@ static void jsonValidFunc( return; } case SQLITE_BLOB: { - JsonParse py; - memset(&py, 0, sizeof(py)); - if( jsonArgIsJsonb(argv[0], &py) ){ + if( jsonFuncArgMightBeBinary(argv[0]) ){ if( flags & 0x04 ){ /* Superficial checking only - accomplished by the - ** jsonArgIsJsonb() call above. */ + ** jsonFuncArgMightBeBinary() call above. */ res = 1; }else if( flags & 0x08 ){ /* Strict checking. 
Check by translating BLOB->TEXT->BLOB. If ** no errors occur, call that a "strict check". */ - res = 0==jsonbValidityCheck(&py, 0, py.nBlob, 1); + JsonParse px; + u32 iErr; + memset(&px, 0, sizeof(px)); + px.aBlob = (u8*)sqlite3_value_blob(argv[0]); + px.nBlob = sqlite3_value_bytes(argv[0]); + iErr = jsonbValidityCheck(&px, 0, px.nBlob, 1); + res = iErr==0; } break; } @@ -212511,7 +211198,9 @@ static void jsonErrorFunc( UNUSED_PARAMETER(argc); memset(&s, 0, sizeof(s)); s.db = sqlite3_context_db_handle(ctx); - if( jsonArgIsJsonb(argv[0], &s) ){ + if( jsonFuncArgMightBeBinary(argv[0]) ){ + s.aBlob = (u8*)sqlite3_value_blob(argv[0]); + s.nBlob = sqlite3_value_bytes(argv[0]); iErrPos = (i64)jsonbValidityCheck(&s, 0, s.nBlob, 1); }else{ s.zJson = (char*)sqlite3_value_text(argv[0]); @@ -212672,20 +211361,18 @@ static void jsonObjectStep( UNUSED_PARAMETER(argc); pStr = (JsonString*)sqlite3_aggregate_context(ctx, sizeof(*pStr)); if( pStr ){ - z = (const char*)sqlite3_value_text(argv[0]); - n = sqlite3Strlen30(z); if( pStr->zBuf==0 ){ jsonStringInit(pStr, ctx); jsonAppendChar(pStr, '{'); - }else if( pStr->nUsed>1 && z!=0 ){ + }else if( pStr->nUsed>1 ){ jsonAppendChar(pStr, ','); } pStr->pCtx = ctx; - if( z!=0 ){ - jsonAppendString(pStr, z, n); - jsonAppendChar(pStr, ':'); - jsonAppendSqlValue(pStr, argv[1]); - } + z = (const char*)sqlite3_value_text(argv[0]); + n = sqlite3Strlen30(z); + jsonAppendString(pStr, z, n); + jsonAppendChar(pStr, ':'); + jsonAppendSqlValue(pStr, argv[1]); } } static void jsonObjectCompute(sqlite3_context *ctx, int isFinal){ @@ -213198,8 +211885,9 @@ static int jsonEachFilter( memset(&p->sParse, 0, sizeof(p->sParse)); p->sParse.nJPRef = 1; p->sParse.db = p->db; - if( jsonArgIsJsonb(argv[0], &p->sParse) ){ - /* We have JSONB */ + if( jsonFuncArgMightBeBinary(argv[0]) ){ + p->sParse.nBlob = sqlite3_value_bytes(argv[0]); + p->sParse.aBlob = (u8*)sqlite3_value_blob(argv[0]); }else{ p->sParse.zJson = (char*)sqlite3_value_text(argv[0]); p->sParse.nJson = sqlite3_value_bytes(argv[0]); @@ -213493,8 +212181,6 @@ SQLITE_PRIVATE int sqlite3JsonTableFunctions(sqlite3 *db){ #endif SQLITE_PRIVATE int sqlite3GetToken(const unsigned char*,int*); /* In the SQLite core */ -/* #include */ - /* ** If building separately, we will need some setup that is normally ** found in sqliteInt.h @@ -213525,14 +212211,6 @@ typedef unsigned int u32; # define ALWAYS(X) (X) # define NEVER(X) (X) #endif -#ifndef offsetof -#define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) -#endif -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) -# define FLEXARRAY -#else -# define FLEXARRAY 1 -#endif #endif /* !defined(SQLITE_AMALGAMATION) */ /* Macro to check for 4-byte alignment. Only used inside of assert() */ @@ -213853,13 +212531,9 @@ struct RtreeMatchArg { RtreeGeomCallback cb; /* Info about the callback functions */ int nParam; /* Number of parameters to the SQL function */ sqlite3_value **apSqlParam; /* Original SQL parameter values */ - RtreeDValue aParam[FLEXARRAY]; /* Values for parameters to the SQL function */ + RtreeDValue aParam[1]; /* Values for parameters to the SQL function */ }; -/* Size of an RtreeMatchArg object with N parameters */ -#define SZ_RTREEMATCHARG(N) \ - (offsetof(RtreeMatchArg,aParam)+(N)*sizeof(RtreeDValue)) - #ifndef MAX # define MAX(x,y) ((x) < (y) ? (y) : (x)) #endif @@ -215548,7 +214222,7 @@ static int rtreeBestIndex(sqlite3_vtab *tab, sqlite3_index_info *pIdxInfo){ } /* -** Return the N-dimensional volume of the cell stored in *p. 
+** Return the N-dimensional volumn of the cell stored in *p. */ static RtreeDValue cellArea(Rtree *pRtree, RtreeCell *p){ RtreeDValue area = (RtreeDValue)1; @@ -217314,7 +215988,7 @@ static sqlite3_stmt *rtreeCheckPrepare( /* ** The second and subsequent arguments to this function are a printf() ** style format string and arguments. This function formats the string and -** appends it to the report being accumulated in pCheck. +** appends it to the report being accumuated in pCheck. */ static void rtreeCheckAppendMsg(RtreeCheck *pCheck, const char *zFmt, ...){ va_list ap; @@ -218502,7 +217176,7 @@ static void geopolyBBoxFinal( ** Determine if point (x0,y0) is beneath line segment (x1,y1)->(x2,y2). ** Returns: ** -** +2 x0,y0 is on the line segment +** +2 x0,y0 is on the line segement ** ** +1 x0,y0 is beneath line segment ** @@ -218608,7 +217282,7 @@ static void geopolyWithinFunc( sqlite3_free(p2); } -/* Objects used by the overlap algorithm. */ +/* Objects used by the overlap algorihm. */ typedef struct GeoEvent GeoEvent; typedef struct GeoSegment GeoSegment; typedef struct GeoOverlap GeoOverlap; @@ -219655,7 +218329,8 @@ static void geomCallback(sqlite3_context *ctx, int nArg, sqlite3_value **aArg){ sqlite3_int64 nBlob; int memErr = 0; - nBlob = SZ_RTREEMATCHARG(nArg) + nArg*sizeof(sqlite3_value*); + nBlob = sizeof(RtreeMatchArg) + (nArg-1)*sizeof(RtreeDValue) + + nArg*sizeof(sqlite3_value*); pBlob = (RtreeMatchArg *)sqlite3_malloc64(nBlob); if( !pBlob ){ sqlite3_result_error_nomem(ctx); @@ -220750,7 +219425,7 @@ SQLITE_PRIVATE void sqlite3Fts3IcuTokenizerModule( ** ** "RBU" stands for "Resumable Bulk Update". As in a large database update ** transmitted via a wireless network to a mobile device. A transaction -** applied using this extension is hence referred to as an "RBU update". +** applied using this extension is hence refered to as an "RBU update". ** ** ** LIMITATIONS @@ -221047,7 +219722,7 @@ SQLITE_API sqlite3rbu *sqlite3rbu_open( ** the next call to sqlite3rbu_vacuum() opens a handle that starts a ** new RBU vacuum operation. ** -** As with sqlite3rbu_open(), Zipvfs users should refer to the comment +** As with sqlite3rbu_open(), Zipvfs users should rever to the comment ** describing the sqlite3rbu_create_vfs() API function below for ** a description of the complications associated with using RBU with ** zipvfs databases. @@ -221143,7 +219818,7 @@ SQLITE_API int sqlite3rbu_savestate(sqlite3rbu *pRbu); ** ** If the RBU update has been completely applied, mark the RBU database ** as fully applied. Otherwise, assuming no error has occurred, save the -** current state of the RBU update application to the RBU database. +** current state of the RBU update appliation to the RBU database. ** ** If an error has already occurred as part of an sqlite3rbu_step() ** or sqlite3rbu_open() call, or if one occurs within this function, an @@ -226069,7 +224744,7 @@ static int rbuVfsFileSize(sqlite3_file *pFile, sqlite_int64 *pSize){ /* If this is an RBU vacuum operation and this is the target database, ** pretend that it has at least one page. Otherwise, SQLite will not - ** check for the existence of a *-wal file. rbuVfsRead() contains + ** check for the existance of a *-wal file. rbuVfsRead() contains ** similar logic. */ if( rc==SQLITE_OK && *pSize==0 && p->pRbu && rbuIsVacuum(p->pRbu) @@ -228001,8 +226676,8 @@ static int dbpageUpdate( /* "INSERT INTO dbpage($PGNO,NULL)" causes page number $PGNO and ** all subsequent pages to be deleted. 
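Usage sketch for the sqlite_dbpage truncation behavior the comment above describes (requires a build with SQLITE_ENABLE_DBPAGE_VTAB; the page number and helper name are illustrative):

    #include <sqlite3.h>

    /* Writing NULL as the page content deletes that page and every
    ** page after it, truncating the database file. */
    static int truncateAtPage(sqlite3 *db){
      return sqlite3_exec(db,
          "INSERT INTO sqlite_dbpage(pgno, data) VALUES(100, NULL)",
          0, 0, 0);
    }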
*/ pTab->iDbTrunc = iDb; - pTab->pgnoTrunc = pgno-1; - pgno = 1; + pgno--; + pTab->pgnoTrunc = pgno; }else{ zErr = "bad page value"; goto update_fail; @@ -229299,7 +227974,7 @@ static int sessionTableInfo( /* ** This function is called to initialize the SessionTable.nCol, azCol[] ** abPK[] and azDflt[] members of SessionTable object pTab. If these -** fields are already initialized, this function is a no-op. +** fields are already initilialized, this function is a no-op. ** ** If an error occurs, an error code is stored in sqlite3_session.rc and ** non-zero returned. Or, if no error occurs but the table has no primary @@ -229318,8 +227993,6 @@ static int sessionInitTable( if( pTab->nCol==0 ){ u8 *abPK; assert( pTab->azCol==0 || pTab->abPK==0 ); - sqlite3_free(pTab->azCol); - pTab->abPK = 0; rc = sessionTableInfo(pSession, db, zDb, pTab->zName, &pTab->nCol, &pTab->nTotalCol, 0, &pTab->azCol, &pTab->azDflt, &pTab->aiIdx, &abPK, @@ -230327,9 +229000,7 @@ SQLITE_API int sqlite3session_diff( SessionTable *pTo; /* Table zTbl */ /* Locate and if necessary initialize the target table object */ - pSession->bAutoAttach++; rc = sessionFindTable(pSession, zTbl, &pTo); - pSession->bAutoAttach--; if( pTo==0 ) goto diff_out; if( sessionInitTable(pSession, pTo, pSession->db, pSession->zDb) ){ rc = pSession->rc; @@ -230340,43 +229011,17 @@ SQLITE_API int sqlite3session_diff( if( rc==SQLITE_OK ){ int bHasPk = 0; int bMismatch = 0; - int nCol = 0; /* Columns in zFrom.zTbl */ + int nCol; /* Columns in zFrom.zTbl */ int bRowid = 0; - u8 *abPK = 0; + u8 *abPK; const char **azCol = 0; - char *zDbExists = 0; - - /* Check that database zFrom is attached. */ - zDbExists = sqlite3_mprintf("SELECT * FROM %Q.sqlite_schema", zFrom); - if( zDbExists==0 ){ - rc = SQLITE_NOMEM; - }else{ - sqlite3_stmt *pDbExists = 0; - rc = sqlite3_prepare_v2(db, zDbExists, -1, &pDbExists, 0); - if( rc==SQLITE_ERROR ){ - rc = SQLITE_OK; - nCol = -1; - } - sqlite3_finalize(pDbExists); - sqlite3_free(zDbExists); - } - - if( rc==SQLITE_OK && nCol==0 ){ - rc = sessionTableInfo(0, db, zFrom, zTbl, - &nCol, 0, 0, &azCol, 0, 0, &abPK, - pSession->bImplicitPK ? &bRowid : 0 - ); - } + rc = sessionTableInfo(0, db, zFrom, zTbl, + &nCol, 0, 0, &azCol, 0, 0, &abPK, + pSession->bImplicitPK ? &bRowid : 0 + ); if( rc==SQLITE_OK ){ if( pTo->nCol!=nCol ){ - if( nCol<=0 ){ - rc = SQLITE_SCHEMA; - if( pzErrMsg ){ - *pzErrMsg = sqlite3_mprintf("no such table: %s.%s", zFrom, zTbl); - } - }else{ - bMismatch = 1; - } + bMismatch = 1; }else{ int i; for(i=0; idb; /* Source database handle */ SessionTable *pTab; /* Used to iterate through attached tables */ - SessionBuffer buf = {0,0,0}; /* Buffer in which to accumulate changeset */ + SessionBuffer buf = {0,0,0}; /* Buffer in which to accumlate changeset */ int rc; /* Return code */ assert( xOutput==0 || (pnChangeset==0 && ppChangeset==0) ); @@ -231505,15 +230150,14 @@ SQLITE_API int sqlite3changeset_start_v2_strm( ** object and the buffer is full, discard some data to free up space. 
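Generic sketch of the compaction pattern sessionDiscardData() below implements for streamed (xInput-driven) changesets: once the read cursor passes a threshold, slide the unconsumed tail to the front of the buffer and rebase the cursor, keeping memory bounded. Names here are illustrative, not the session-module fields:

    #include <string.h>
    typedef unsigned char u8;

    static void compactWindow(u8 *aBuf, int *pnBuf, int *piRead){
      int nRemain = *pnBuf - *piRead;   /* bytes not yet consumed */
      if( nRemain>0 ) memmove(aBuf, &aBuf[*piRead], nRemain);
      *pnBuf = nRemain;                 /* buffer holds only the tail */
      *piRead = 0;                      /* cursor rebased to the front */
    }

Note the behavioral change in the hunk itself: the 3.50.x side discards only up to iCurrent (the start of the current change) and rebases both cursors, while the 3.49.x side discards everything up to iNext.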
*/ static void sessionDiscardData(SessionInput *pIn){ - if( pIn->xInput && pIn->iCurrent>=sessions_strm_chunk_size ){ - int nMove = pIn->buf.nBuf - pIn->iCurrent; + if( pIn->xInput && pIn->iNext>=sessions_strm_chunk_size ){ + int nMove = pIn->buf.nBuf - pIn->iNext; assert( nMove>=0 ); if( nMove>0 ){ - memmove(pIn->buf.aBuf, &pIn->buf.aBuf[pIn->iCurrent], nMove); + memmove(pIn->buf.aBuf, &pIn->buf.aBuf[pIn->iNext], nMove); } - pIn->buf.nBuf -= pIn->iCurrent; - pIn->iNext -= pIn->iCurrent; - pIn->iCurrent = 0; + pIn->buf.nBuf -= pIn->iNext; + pIn->iNext = 0; pIn->nData = pIn->buf.nBuf; } } @@ -231867,8 +230511,8 @@ static int sessionChangesetNextOne( p->rc = sessionInputBuffer(&p->in, 2); if( p->rc!=SQLITE_OK ) return p->rc; - p->in.iCurrent = p->in.iNext; sessionDiscardData(&p->in); + p->in.iCurrent = p->in.iNext; /* If the iterator is already at the end of the changeset, return DONE. */ if( p->in.iNext>=p->in.nData ){ @@ -234227,19 +232871,14 @@ SQLITE_API int sqlite3changegroup_add_change( sqlite3_changegroup *pGrp, sqlite3_changeset_iter *pIter ){ - int rc = SQLITE_OK; - if( pIter->in.iCurrent==pIter->in.iNext || pIter->rc!=SQLITE_OK || pIter->bInvert ){ /* Iterator does not point to any valid entry or is an INVERT iterator. */ - rc = SQLITE_ERROR; - }else{ - pIter->in.bNoDiscard = 1; - rc = sessionOneChangeToHash(pGrp, pIter, 0); + return SQLITE_ERROR; } - return rc; + return sessionOneChangeToHash(pGrp, pIter, 0); } /* @@ -235537,7 +234176,6 @@ SQLITE_EXTENSION_INIT1 /* #include */ /* #include */ -/* #include */ #ifndef SQLITE_AMALGAMATION @@ -235593,18 +234231,6 @@ typedef sqlite3_uint64 u64; # define EIGHT_BYTE_ALIGNMENT(X) ((((uptr)(X) - (uptr)0)&7)==0) #endif -/* -** Macros needed to provide flexible arrays in a portable way -*/ -#ifndef offsetof -# define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) -#endif -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) -# define FLEXARRAY -#else -# define FLEXARRAY 1 -#endif - #endif /* Truncate very long tokens to this many bytes. Hard limit is @@ -235677,11 +234303,10 @@ typedef struct Fts5Colset Fts5Colset; */ struct Fts5Colset { int nCol; - int aiCol[FLEXARRAY]; + int aiCol[1]; }; -/* Size (int bytes) of a complete Fts5Colset object with N columns. */ -#define SZ_FTS5COLSET(N) (sizeof(i64)*((N+2)/2)) + /************************************************************************** ** Interface to code in fts5_config.c. fts5_config.c contains contains code @@ -236510,7 +235135,7 @@ static void sqlite3Fts5UnicodeAscii(u8*, u8*); ** ** The "lemon" program processes an LALR(1) input grammar file, then uses ** this template to construct a parser. The "lemon" program inserts text -** at each "%%" line. Also, any "P-a-r-s-e" identifier prefix (without the +** at each "%%" line. Also, any "P-a-r-s-e" identifer prefix (without the ** interstitial "-" characters) contained in this template is changed into ** the value of the %name directive from the grammar. Otherwise, the content ** of this template is copied straight through into the generate parser @@ -238664,7 +237289,7 @@ static int fts5Bm25GetData( ** under consideration. ** ** The problem with this is that if (N < 2*nHit), the IDF is - ** negative. Which is undesirable. So the minimum allowable IDF is + ** negative. Which is undesirable. So the mimimum allowable IDF is ** (1e-6) - roughly the same as a term that appears in just over ** half of set of 5,000,000 documents. 
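The IDF comment above is the standard BM25 guard in compact form: with nRow documents in total and nHit containing the term, log((nRow-nHit+0.5)/(nHit+0.5)) goes negative as soon as the term appears in more than half the documents, so a small positive floor is applied. A sketch of the clamped computation, matching the constants in the comment:

    #include <math.h>

    static double bm25Idf(double nRow, double nHit){
      double idf = log( (nRow - nHit + 0.5) / (nHit + 0.5) );
      return idf>1e-6 ? idf : 1e-6;   /* floor: never zero or negative */
    }
    /* e.g. nRow=5e6, nHit=2.5e6 gives a raw idf of 0, clamped to 1e-6:
    ** the "just over half of 5,000,000 documents" case described above. */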
*/ double idf = log( (nRow - nHit + 0.5) / (nHit + 0.5) ); @@ -239127,7 +237752,7 @@ static char *sqlite3Fts5Strndup(int *pRc, const char *pIn, int nIn){ ** * The 52 upper and lower case ASCII characters, and ** * The 10 integer ASCII characters. ** * The underscore character "_" (0x5F). -** * The unicode "substitute" character (0x1A). +** * The unicode "subsitute" character (0x1A). */ static int sqlite3Fts5IsBareword(char t){ u8 aBareword[128] = { @@ -240445,13 +239070,9 @@ struct Fts5ExprNode { /* Child nodes. For a NOT node, this array always contains 2 entries. For ** AND or OR nodes, it contains 2 or more entries. */ int nChild; /* Number of child nodes */ - Fts5ExprNode *apChild[FLEXARRAY]; /* Array of child nodes */ + Fts5ExprNode *apChild[1]; /* Array of child nodes */ }; -/* Size (in bytes) of an Fts5ExprNode object that holds up to N children */ -#define SZ_FTS5EXPRNODE(N) \ - (offsetof(Fts5ExprNode,apChild) + (N)*sizeof(Fts5ExprNode*)) - #define Fts5NodeIsString(p) ((p)->eType==FTS5_TERM || (p)->eType==FTS5_STRING) /* @@ -240482,13 +239103,9 @@ struct Fts5ExprPhrase { Fts5ExprNode *pNode; /* FTS5_STRING node this phrase is part of */ Fts5Buffer poslist; /* Current position list */ int nTerm; /* Number of entries in aTerm[] */ - Fts5ExprTerm aTerm[FLEXARRAY]; /* Terms that make up this phrase */ + Fts5ExprTerm aTerm[1]; /* Terms that make up this phrase */ }; -/* Size (in bytes) of an Fts5ExprPhrase object that holds up to N terms */ -#define SZ_FTS5EXPRPHRASE(N) \ - (offsetof(Fts5ExprPhrase,aTerm) + (N)*sizeof(Fts5ExprTerm)) - /* ** One or more phrases that must appear within a certain token distance of ** each other within each matching document. @@ -240497,12 +239114,9 @@ struct Fts5ExprNearset { int nNear; /* NEAR parameter */ Fts5Colset *pColset; /* Columns to search (NULL -> all columns) */ int nPhrase; /* Number of entries in aPhrase[] array */ - Fts5ExprPhrase *apPhrase[FLEXARRAY]; /* Array of phrase pointers */ + Fts5ExprPhrase *apPhrase[1]; /* Array of phrase pointers */ }; -/* Size (in bytes) of an Fts5ExprNearset object covering up to N phrases */ -#define SZ_FTS5EXPRNEARSET(N) \ - (offsetof(Fts5ExprNearset,apPhrase)+(N)*sizeof(Fts5ExprPhrase*)) /* ** Parse context. @@ -240662,7 +239276,7 @@ static int sqlite3Fts5ExprNew( /* If the LHS of the MATCH expression was a user column, apply the ** implicit column-filter. */ if( sParse.rc==SQLITE_OK && iColnCol ){ - int n = SZ_FTS5COLSET(1); + int n = sizeof(Fts5Colset); Fts5Colset *pColset = (Fts5Colset*)sqlite3Fts5MallocZero(&sParse.rc, n); if( pColset ){ pColset->nCol = 1; @@ -242020,7 +240634,7 @@ static Fts5ExprNearset *sqlite3Fts5ParseNearset( if( pParse->rc==SQLITE_OK ){ if( pNear==0 ){ sqlite3_int64 nByte; - nByte = SZ_FTS5EXPRNEARSET(SZALLOC+1); + nByte = sizeof(Fts5ExprNearset) + SZALLOC * sizeof(Fts5ExprPhrase*); pRet = sqlite3_malloc64(nByte); if( pRet==0 ){ pParse->rc = SQLITE_NOMEM; @@ -242031,7 +240645,7 @@ static Fts5ExprNearset *sqlite3Fts5ParseNearset( int nNew = pNear->nPhrase + SZALLOC; sqlite3_int64 nByte; - nByte = SZ_FTS5EXPRNEARSET(nNew+1); + nByte = sizeof(Fts5ExprNearset) + nNew * sizeof(Fts5ExprPhrase*); pRet = (Fts5ExprNearset*)sqlite3_realloc64(pNear, nByte); if( pRet==0 ){ pParse->rc = SQLITE_NOMEM; @@ -242122,12 +240736,12 @@ static int fts5ParseTokenize( int nNew = SZALLOC + (pPhrase ? 
pPhrase->nTerm : 0); pNew = (Fts5ExprPhrase*)sqlite3_realloc64(pPhrase, - SZ_FTS5EXPRPHRASE(nNew+1) + sizeof(Fts5ExprPhrase) + sizeof(Fts5ExprTerm) * nNew ); if( pNew==0 ){ rc = SQLITE_NOMEM; }else{ - if( pPhrase==0 ) memset(pNew, 0, SZ_FTS5EXPRPHRASE(1)); + if( pPhrase==0 ) memset(pNew, 0, sizeof(Fts5ExprPhrase)); pCtx->pPhrase = pPhrase = pNew; pNew->nTerm = nNew - SZALLOC; } @@ -242235,7 +240849,7 @@ static Fts5ExprPhrase *sqlite3Fts5ParseTerm( if( sCtx.pPhrase==0 ){ /* This happens when parsing a token or quoted phrase that contains ** no token characters at all. (e.g ... MATCH '""'). */ - sCtx.pPhrase = sqlite3Fts5MallocZero(&pParse->rc, SZ_FTS5EXPRPHRASE(1)); + sCtx.pPhrase = sqlite3Fts5MallocZero(&pParse->rc, sizeof(Fts5ExprPhrase)); }else if( sCtx.pPhrase->nTerm ){ sCtx.pPhrase->aTerm[sCtx.pPhrase->nTerm-1].bPrefix = (u8)bPrefix; } @@ -242270,18 +240884,19 @@ static int sqlite3Fts5ExprClonePhrase( sizeof(Fts5ExprPhrase*)); } if( rc==SQLITE_OK ){ - pNew->pRoot = (Fts5ExprNode*)sqlite3Fts5MallocZero(&rc, SZ_FTS5EXPRNODE(1)); + pNew->pRoot = (Fts5ExprNode*)sqlite3Fts5MallocZero(&rc, + sizeof(Fts5ExprNode)); } if( rc==SQLITE_OK ){ pNew->pRoot->pNear = (Fts5ExprNearset*)sqlite3Fts5MallocZero(&rc, - SZ_FTS5EXPRNEARSET(2)); + sizeof(Fts5ExprNearset) + sizeof(Fts5ExprPhrase*)); } if( rc==SQLITE_OK && ALWAYS(pOrig!=0) ){ Fts5Colset *pColsetOrig = pOrig->pNode->pNear->pColset; if( pColsetOrig ){ sqlite3_int64 nByte; Fts5Colset *pColset; - nByte = SZ_FTS5COLSET(pColsetOrig->nCol); + nByte = sizeof(Fts5Colset) + (pColsetOrig->nCol-1) * sizeof(int); pColset = (Fts5Colset*)sqlite3Fts5MallocZero(&rc, nByte); if( pColset ){ memcpy(pColset, pColsetOrig, (size_t)nByte); @@ -242309,7 +240924,7 @@ static int sqlite3Fts5ExprClonePhrase( }else{ /* This happens when parsing a token or quoted phrase that contains ** no token characters at all. (e.g ... MATCH '""'). 
*/ - sCtx.pPhrase = sqlite3Fts5MallocZero(&rc, SZ_FTS5EXPRPHRASE(1)); + sCtx.pPhrase = sqlite3Fts5MallocZero(&rc, sizeof(Fts5ExprPhrase)); } } @@ -242374,8 +240989,7 @@ static void sqlite3Fts5ParseSetDistance( ); return; } - if( nNear<214748363 ) nNear = nNear * 10 + (p->p[i] - '0'); - /* ^^^^^^^^^^^^^^^--- Prevent integer overflow */ + nNear = nNear * 10 + (p->p[i] - '0'); } }else{ nNear = FTS5_DEFAULT_NEARDIST; @@ -242404,7 +241018,7 @@ static Fts5Colset *fts5ParseColset( assert( pParse->rc==SQLITE_OK ); assert( iCol>=0 && iColpConfig->nCol ); - pNew = sqlite3_realloc64(p, SZ_FTS5COLSET(nCol+1)); + pNew = sqlite3_realloc64(p, sizeof(Fts5Colset) + sizeof(int)*nCol); if( pNew==0 ){ pParse->rc = SQLITE_NOMEM; }else{ @@ -242439,7 +241053,7 @@ static Fts5Colset *sqlite3Fts5ParseColsetInvert(Fts5Parse *pParse, Fts5Colset *p int nCol = pParse->pConfig->nCol; pRet = (Fts5Colset*)sqlite3Fts5MallocZero(&pParse->rc, - SZ_FTS5COLSET(nCol+1) + sizeof(Fts5Colset) + sizeof(int)*nCol ); if( pRet ){ int i; @@ -242500,7 +241114,7 @@ static Fts5Colset *sqlite3Fts5ParseColset( static Fts5Colset *fts5CloneColset(int *pRc, Fts5Colset *pOrig){ Fts5Colset *pRet; if( pOrig ){ - sqlite3_int64 nByte = SZ_FTS5COLSET(pOrig->nCol); + sqlite3_int64 nByte = sizeof(Fts5Colset) + (pOrig->nCol-1) * sizeof(int); pRet = (Fts5Colset*)sqlite3Fts5MallocZero(pRc, nByte); if( pRet ){ memcpy(pRet, pOrig, (size_t)nByte); @@ -242668,7 +241282,7 @@ static Fts5ExprNode *fts5ParsePhraseToAnd( assert( pNear->nPhrase==1 ); assert( pParse->bPhraseToAnd ); - nByte = SZ_FTS5EXPRNODE(nTerm+1); + nByte = sizeof(Fts5ExprNode) + nTerm*sizeof(Fts5ExprNode*); pRet = (Fts5ExprNode*)sqlite3Fts5MallocZero(&pParse->rc, nByte); if( pRet ){ pRet->eType = FTS5_AND; @@ -242678,7 +241292,7 @@ static Fts5ExprNode *fts5ParsePhraseToAnd( pParse->nPhrase--; for(ii=0; iirc, SZ_FTS5EXPRPHRASE(1) + &pParse->rc, sizeof(Fts5ExprPhrase) ); if( pPhrase ){ if( parseGrowPhraseArray(pParse) ){ @@ -242747,7 +241361,7 @@ static Fts5ExprNode *sqlite3Fts5ParseNode( if( pRight->eType==eType ) nChild += pRight->nChild-1; } - nByte = SZ_FTS5EXPRNODE(nChild); + nByte = sizeof(Fts5ExprNode) + sizeof(Fts5ExprNode*)*(nChild-1); pRet = (Fts5ExprNode*)sqlite3Fts5MallocZero(&pParse->rc, nByte); if( pRet ){ @@ -243622,7 +242236,7 @@ static int sqlite3Fts5ExprInstToken( } /* -** Clear the token mappings for all Fts5IndexIter objects managed by +** Clear the token mappings for all Fts5IndexIter objects mannaged by ** the expression passed as the only argument. */ static void sqlite3Fts5ExprClearTokens(Fts5Expr *pExpr){ @@ -243657,7 +242271,7 @@ typedef struct Fts5HashEntry Fts5HashEntry; /* ** This file contains the implementation of an in-memory hash table used -** to accumulate "term -> doclist" content before it is flushed to a level-0 +** to accumuluate "term -> doclist" content before it is flused to a level-0 ** segment. 
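The sqlite3Fts5ParseSetDistance() hunk above removes an integer-overflow guard on the accumulated decimal NEAR distance. A sketch of what the guard protects against; the 214748363 bound in the removed line is exactly (INT_MAX - 9)/10, the largest running value that can safely take one more digit:

    #include <limits.h>

    static int appendDigit(int nNear, char c){
      if( nNear < (INT_MAX - 9)/10 ){  /* (2147483647-9)/10 == 214748363 */
        nNear = nNear*10 + (c - '0');
      }
      return nNear;  /* saturates silently, as on the 3.50.x side */
    }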
*/ @@ -243714,7 +242328,7 @@ struct Fts5HashEntry { }; /* -** Equivalent to: +** Eqivalent to: ** ** char *fts5EntryKey(Fts5HashEntry *pEntry){ return zKey; } */ @@ -244650,13 +243264,9 @@ struct Fts5Structure { u64 nOriginCntr; /* Origin value for next top-level segment */ int nSegment; /* Total segments in this structure */ int nLevel; /* Number of levels in this index */ - Fts5StructureLevel aLevel[FLEXARRAY]; /* Array of nLevel level objects */ + Fts5StructureLevel aLevel[1]; /* Array of nLevel level objects */ }; -/* Size (in bytes) of an Fts5Structure object holding up to N levels */ -#define SZ_FTS5STRUCTURE(N) \ - (offsetof(Fts5Structure,aLevel) + (N)*sizeof(Fts5StructureLevel)) - /* ** An object of type Fts5SegWriter is used to write to segments. */ @@ -244786,15 +243396,11 @@ struct Fts5SegIter { ** Array of tombstone pages. Reference counted. */ struct Fts5TombstoneArray { - int nRef; /* Number of pointers to this object */ + int nRef; /* Number of pointers to this object */ int nTombstone; - Fts5Data *apTombstone[FLEXARRAY]; /* Array of tombstone pages */ + Fts5Data *apTombstone[1]; /* Array of tombstone pages */ }; -/* Size (in bytes) of an Fts5TombstoneArray holding up to N tombstones */ -#define SZ_FTS5TOMBSTONEARRAY(N) \ - (offsetof(Fts5TombstoneArray,apTombstone)+(N)*sizeof(Fts5Data*)) - /* ** Argument is a pointer to an Fts5Data structure that contains a ** leaf page. @@ -244863,12 +243469,9 @@ struct Fts5Iter { i64 iSwitchRowid; /* Firstest rowid of other than aFirst[1] */ Fts5CResult *aFirst; /* Current merge state (see above) */ - Fts5SegIter aSeg[FLEXARRAY]; /* Array of segment iterators */ + Fts5SegIter aSeg[1]; /* Array of segment iterators */ }; -/* Size (in bytes) of an Fts5Iter object holding up to N segment iterators */ -#define SZ_FTS5ITER(N) (offsetof(Fts5Iter,aSeg)+(N)*sizeof(Fts5SegIter)) - /* ** An instance of the following type is used to iterate through the contents ** of a doclist-index record. 
@@ -244895,13 +243498,9 @@ struct Fts5DlidxLvl { struct Fts5DlidxIter { int nLvl; int iSegid; - Fts5DlidxLvl aLvl[FLEXARRAY]; + Fts5DlidxLvl aLvl[1]; }; -/* Size (in bytes) of an Fts5DlidxIter object with up to N levels */ -#define SZ_FTS5DLIDXITER(N) \ - (offsetof(Fts5DlidxIter,aLvl)+(N)*sizeof(Fts5DlidxLvl)) - static void fts5PutU16(u8 *aOut, u16 iVal){ aOut[0] = (iVal>>8); aOut[1] = (iVal&0xFF); @@ -245269,7 +243868,7 @@ static int sqlite3Fts5StructureTest(Fts5Index *p, void *pStruct){ static void fts5StructureMakeWritable(int *pRc, Fts5Structure **pp){ Fts5Structure *p = *pp; if( *pRc==SQLITE_OK && p->nRef>1 ){ - i64 nByte = SZ_FTS5STRUCTURE(p->nLevel); + i64 nByte = sizeof(Fts5Structure)+(p->nLevel-1)*sizeof(Fts5StructureLevel); Fts5Structure *pNew; pNew = (Fts5Structure*)sqlite3Fts5MallocZero(pRc, nByte); if( pNew ){ @@ -245343,7 +243942,10 @@ static int fts5StructureDecode( ){ return FTS5_CORRUPT; } - nByte = SZ_FTS5STRUCTURE(nLevel); + nByte = ( + sizeof(Fts5Structure) + /* Main structure */ + sizeof(Fts5StructureLevel) * (nLevel-1) /* aLevel[] array */ + ); pRet = (Fts5Structure*)sqlite3Fts5MallocZero(&rc, nByte); if( pRet ){ @@ -245423,7 +244025,10 @@ static void fts5StructureAddLevel(int *pRc, Fts5Structure **ppStruct){ if( *pRc==SQLITE_OK ){ Fts5Structure *pStruct = *ppStruct; int nLevel = pStruct->nLevel; - sqlite3_int64 nByte = SZ_FTS5STRUCTURE(nLevel+2); + sqlite3_int64 nByte = ( + sizeof(Fts5Structure) + /* Main structure */ + sizeof(Fts5StructureLevel) * (nLevel+1) /* aLevel[] array */ + ); pStruct = sqlite3_realloc64(pStruct, nByte); if( pStruct ){ @@ -245962,7 +244567,7 @@ static Fts5DlidxIter *fts5DlidxIterInit( int bDone = 0; for(i=0; p->rc==SQLITE_OK && bDone==0; i++){ - sqlite3_int64 nByte = SZ_FTS5DLIDXITER(i+1); + sqlite3_int64 nByte = sizeof(Fts5DlidxIter) + i * sizeof(Fts5DlidxLvl); Fts5DlidxIter *pNew; pNew = (Fts5DlidxIter*)sqlite3_realloc64(pIter, nByte); @@ -246178,9 +244783,9 @@ static void fts5SegIterSetNext(Fts5Index *p, Fts5SegIter *pIter){ ** leave an error in the Fts5Index object. 
*/ static void fts5SegIterAllocTombstone(Fts5Index *p, Fts5SegIter *pIter){ - const i64 nTomb = (i64)pIter->pSeg->nPgTombstone; + const int nTomb = pIter->pSeg->nPgTombstone; if( nTomb>0 ){ - i64 nByte = SZ_FTS5TOMBSTONEARRAY(nTomb+1); + int nByte = nTomb * sizeof(Fts5Data*) + sizeof(Fts5TombstoneArray); Fts5TombstoneArray *pNew; pNew = (Fts5TombstoneArray*)sqlite3Fts5MallocZero(&p->rc, nByte); if( pNew ){ @@ -247641,7 +246246,8 @@ static Fts5Iter *fts5MultiIterAlloc( for(nSlot=2; nSlotaSeg[] */ + sizeof(Fts5Iter) + /* pNew */ + sizeof(Fts5SegIter) * (nSlot-1) + /* pNew->aSeg[] */ sizeof(Fts5CResult) * nSlot /* pNew->aFirst[] */ ); if( pNew ){ @@ -249442,7 +248048,7 @@ static void fts5DoSecureDelete( int iDelKeyOff = 0; /* Offset of deleted key, if any */ nIdx = nPg-iPgIdx; - aIdx = sqlite3Fts5MallocZero(&p->rc, ((i64)nIdx)+16); + aIdx = sqlite3Fts5MallocZero(&p->rc, nIdx+16); if( p->rc ) return; memcpy(aIdx, &aPg[iPgIdx], nIdx); @@ -250007,7 +248613,7 @@ static Fts5Structure *fts5IndexOptimizeStruct( Fts5Structure *pStruct ){ Fts5Structure *pNew = 0; - sqlite3_int64 nByte = SZ_FTS5STRUCTURE(1); + sqlite3_int64 nByte = sizeof(Fts5Structure); int nSeg = pStruct->nSegment; int i; @@ -250036,8 +248642,7 @@ static Fts5Structure *fts5IndexOptimizeStruct( assert( pStruct->aLevel[i].nMerge<=nThis ); } - nByte += (((i64)pStruct->nLevel)+1) * sizeof(Fts5StructureLevel); - assert( nByte==SZ_FTS5STRUCTURE(pStruct->nLevel+2) ); + nByte += (pStruct->nLevel+1) * sizeof(Fts5StructureLevel); pNew = (Fts5Structure*)sqlite3Fts5MallocZero(&p->rc, nByte); if( pNew ){ @@ -250614,13 +249219,9 @@ struct Fts5TokenDataIter { int nIterAlloc; Fts5PoslistReader *aPoslistReader; int *aPoslistToIter; - Fts5Iter *apIter[FLEXARRAY]; + Fts5Iter *apIter[1]; }; -/* Size in bytes of an Fts5TokenDataIter object holding up to N iterators */ -#define SZ_FTS5TOKENDATAITER(N) \ - (offsetof(Fts5TokenDataIter,apIter) + (N)*sizeof(Fts5Iter)) - /* ** The two input arrays - a1[] and a2[] - are in sorted order. This function ** merges the two arrays together and writes the result to output array @@ -250692,7 +249293,7 @@ static void fts5TokendataIterAppendMap( /* ** Sort the contents of the pT->aMap[] array. ** -** The sorting algorithm requires a malloc(). If this fails, an error code +** The sorting algorithm requries a malloc(). If this fails, an error code ** is left in Fts5Index.rc before returning. */ static void fts5TokendataIterSortMap(Fts5Index *p, Fts5TokenDataIter *pT){ @@ -250883,7 +249484,7 @@ static void fts5SetupPrefixIter( && p->pConfig->bPrefixInsttoken ){ s.pTokendata = &s2; - s2.pT = (Fts5TokenDataIter*)fts5IdxMalloc(p, SZ_FTS5TOKENDATAITER(1)); + s2.pT = (Fts5TokenDataIter*)fts5IdxMalloc(p, sizeof(*s2.pT)); } if( p->pConfig->eDetail==FTS5_DETAIL_NONE ){ @@ -250929,8 +249530,7 @@ static void fts5SetupPrefixIter( } } - pData = fts5IdxMalloc(p, sizeof(*pData) - + ((i64)s.doclist.n)+FTS5_DATA_ZERO_PADDING); + pData = fts5IdxMalloc(p, sizeof(*pData)+s.doclist.n+FTS5_DATA_ZERO_PADDING); assert( pData!=0 || p->rc!=SQLITE_OK ); if( pData ){ pData->p = (u8*)&pData[1]; @@ -251011,17 +249611,15 @@ static int sqlite3Fts5IndexRollback(Fts5Index *p){ ** and the initial version of the "averages" record (a zero-byte blob). 
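In the fts5MultiIterAlloc() hunk above, the segment count is rounded up to a power of two before sizing the allocation, because the aFirst[] array holds a pairwise-comparison merge tree over the segment iterators and needs a full complement of leaf slots. A sketch of the rounding, assuming the truncated for-loop in the hunk reads as in upstream SQLite:

    /* Round nSeg up to the next power of two, minimum 2. */
    static int fts5RoundUpSlots(int nSeg){
      int nSlot;
      for(nSlot=2; nSlot<nSeg; nSlot = nSlot*2);
      return nSlot;
    }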
*/ static int sqlite3Fts5IndexReinit(Fts5Index *p){ - Fts5Structure *pTmp; - u8 tmpSpace[SZ_FTS5STRUCTURE(1)]; + Fts5Structure s; fts5StructureInvalidate(p); fts5IndexDiscardData(p); - pTmp = (Fts5Structure*)tmpSpace; - memset(pTmp, 0, SZ_FTS5STRUCTURE(1)); + memset(&s, 0, sizeof(Fts5Structure)); if( p->pConfig->bContentlessDelete ){ - pTmp->nOriginCntr = 1; + s.nOriginCntr = 1; } fts5DataWrite(p, FTS5_AVERAGES_ROWID, (const u8*)"", 0); - fts5StructureWrite(p, pTmp); + fts5StructureWrite(p, &s); return fts5IndexReturn(p); } @@ -251229,7 +249827,7 @@ static Fts5TokenDataIter *fts5AppendTokendataIter( if( p->rc==SQLITE_OK ){ if( pIn==0 || pIn->nIter==pIn->nIterAlloc ){ int nAlloc = pIn ? pIn->nIterAlloc*2 : 16; - int nByte = SZ_FTS5TOKENDATAITER(nAlloc+1); + int nByte = nAlloc * sizeof(Fts5Iter*) + sizeof(Fts5TokenDataIter); Fts5TokenDataIter *pNew = (Fts5TokenDataIter*)sqlite3_realloc(pIn, nByte); if( pNew==0 ){ @@ -251745,8 +250343,7 @@ static int fts5SetupPrefixIterTokendata( fts5BufferGrow(&p->rc, &token, nToken+1); assert( token.p!=0 || p->rc!=SQLITE_OK ); - ctx.pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, - SZ_FTS5TOKENDATAITER(1)); + ctx.pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, sizeof(*ctx.pT)); if( p->rc==SQLITE_OK ){ @@ -251877,8 +250474,7 @@ static int sqlite3Fts5IndexIterWriteTokendata( if( pIter->nSeg>0 ){ /* This is a prefix term iterator. */ if( pT==0 ){ - pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, - SZ_FTS5TOKENDATAITER(1)); + pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, sizeof(*pT)); pIter->pTokenDataIter = pT; } if( pT ){ @@ -252912,7 +251508,7 @@ static void fts5DecodeRowid( #if defined(SQLITE_TEST) || defined(SQLITE_FTS5_DEBUG) static void fts5DebugRowid(int *pRc, Fts5Buffer *pBuf, i64 iKey){ - int iSegid, iHeight, iPgno, bDlidx, bTomb; /* Rowid components */ + int iSegid, iHeight, iPgno, bDlidx, bTomb; /* Rowid compenents */ fts5DecodeRowid(iKey, &bTomb, &iSegid, &bDlidx, &iHeight, &iPgno); if( iSegid==0 ){ @@ -253158,7 +251754,7 @@ static void fts5DecodeFunction( ** buffer overreads even if the record is corrupt. */ n = sqlite3_value_bytes(apVal[1]); aBlob = sqlite3_value_blob(apVal[1]); - nSpace = ((i64)n) + FTS5_DATA_ZERO_PADDING; + nSpace = n + FTS5_DATA_ZERO_PADDING; a = (u8*)sqlite3Fts5MallocZero(&rc, nSpace); if( a==0 ) goto decode_out; if( n>0 ) memcpy(a, aBlob, n); @@ -253873,11 +252469,9 @@ struct Fts5Sorter { i64 iRowid; /* Current rowid */ const u8 *aPoslist; /* Position lists for current row */ int nIdx; /* Number of entries in aIdx[] */ - int aIdx[FLEXARRAY]; /* Offsets into aPoslist for current row */ + int aIdx[1]; /* Offsets into aPoslist for current row */ }; -/* Size (int bytes) of an Fts5Sorter object with N indexes */ -#define SZ_FTS5SORTER(N) (offsetof(Fts5Sorter,nIdx)+((N+2)/2)*sizeof(i64)) /* ** Virtual-table cursor object. 
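The fts5AppendTokendataIter() hunk above shows the growth policy for the per-token iterator array: capacity starts at 16 and doubles on exhaustion, so N appends cost O(N) copied pointers across O(log N) reallocations. A one-line sketch of that policy, with a hypothetical name:

    static int nextIterCapacity(int nAlloc){
      return nAlloc ? nAlloc*2 : 16;   /* 16, 32, 64, ... */
    }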
@@ -254755,7 +253349,7 @@ static int fts5CursorFirstSorted( const char *zRankArgs = pCsr->zRankArgs; nPhrase = sqlite3Fts5ExprPhraseCount(pCsr->pExpr); - nByte = SZ_FTS5SORTER(nPhrase); + nByte = sizeof(Fts5Sorter) + sizeof(int) * (nPhrase-1); pSorter = (Fts5Sorter*)sqlite3_malloc64(nByte); if( pSorter==0 ) return SQLITE_NOMEM; memset(pSorter, 0, (size_t)nByte); @@ -257281,7 +255875,7 @@ static void fts5SourceIdFunc( ){ assert( nArg==0 ); UNUSED_PARAM2(nArg, apUnused); - sqlite3_result_text(pCtx, "fts5: 2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543", -1, SQLITE_TRANSIENT); + sqlite3_result_text(pCtx, "fts5: 2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70", -1, SQLITE_TRANSIENT); } /* @@ -257506,8 +256100,8 @@ static int fts5Init(sqlite3 *db){ ** its entry point to enable the matchinfo() demo. */ #ifdef SQLITE_FTS5_ENABLE_TEST_MI if( rc==SQLITE_OK ){ - extern int sqlite3Fts5TestRegisterMatchinfoAPI(fts5_api*); - rc = sqlite3Fts5TestRegisterMatchinfoAPI(&pGlobal->api); + extern int sqlite3Fts5TestRegisterMatchinfo(sqlite3*); + rc = sqlite3Fts5TestRegisterMatchinfo(db); } #endif @@ -258096,7 +256690,6 @@ static int fts5StorageDeleteFromIndex( for(iCol=1; rc==SQLITE_OK && iCol<=pConfig->nCol; iCol++){ if( pConfig->abUnindexed[iCol-1]==0 ){ sqlite3_value *pVal = 0; - sqlite3_value *pFree = 0; const char *pText = 0; int nText = 0; const char *pLoc = 0; @@ -258113,22 +256706,11 @@ static int fts5StorageDeleteFromIndex( if( pConfig->bLocale && sqlite3Fts5IsLocaleValue(pConfig, pVal) ){ rc = sqlite3Fts5DecodeLocaleValue(pVal, &pText, &nText, &pLoc, &nLoc); }else{ - if( sqlite3_value_type(pVal)!=SQLITE_TEXT ){ - /* Make a copy of the value to work with. This is because the call - ** to sqlite3_value_text() below forces the type of the value to - ** SQLITE_TEXT, and we may need to use it again later. */ - pFree = pVal = sqlite3_value_dup(pVal); - if( pVal==0 ){ - rc = SQLITE_NOMEM; - } - } - if( rc==SQLITE_OK ){ - pText = (const char*)sqlite3_value_text(pVal); - nText = sqlite3_value_bytes(pVal); - if( pConfig->bLocale && pSeek ){ - pLoc = (const char*)sqlite3_column_text(pSeek, iCol+pConfig->nCol); - nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); - } + pText = (const char*)sqlite3_value_text(pVal); + nText = sqlite3_value_bytes(pVal); + if( pConfig->bLocale && pSeek ){ + pLoc = (const char*)sqlite3_column_text(pSeek, iCol + pConfig->nCol); + nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); } } @@ -258144,7 +256726,6 @@ static int fts5StorageDeleteFromIndex( } sqlite3Fts5ClearLocale(pConfig); } - sqlite3_value_free(pFree); } } if( rc==SQLITE_OK && p->nTotalRow<1 ){ @@ -261358,6 +259939,7 @@ static void sqlite3Fts5UnicodeAscii(u8 *aArray, u8 *aAscii){ aAscii[0] = 0; /* 0x00 is never a token character */ } + /* ** 2015 May 30 ** @@ -261898,12 +260480,12 @@ static int fts5VocabInitVtab( *pzErr = sqlite3_mprintf("wrong number of vtable arguments"); rc = SQLITE_ERROR; }else{ - i64 nByte; /* Bytes of space to allocate */ + int nByte; /* Bytes of space to allocate */ const char *zDb = bDb ? argv[3] : argv[1]; const char *zTab = bDb ? argv[4] : argv[3]; const char *zType = bDb ? 
argv[5] : argv[4]; - i64 nDb = strlen(zDb)+1; - i64 nTab = strlen(zTab)+1; + int nDb = (int)strlen(zDb)+1; + int nTab = (int)strlen(zTab)+1; int eType = 0; rc = fts5VocabTableType(zType, pzErr, &eType); diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h index d65d949a..5e07ce68 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h @@ -134,7 +134,7 @@ extern "C" { ** ** Since [version 3.6.18] ([dateof:3.6.18]), ** SQLite source code has been stored in the -** Fossil configuration management +** Fossil configuration management ** system. ^The SQLITE_SOURCE_ID macro evaluates to ** a string which identifies a particular check-in of SQLite ** within its configuration management system. ^The SQLITE_SOURCE_ID @@ -147,9 +147,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.3" -#define SQLITE_VERSION_NUMBER 3050003 -#define SQLITE_SOURCE_ID "2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543" +#define SQLITE_VERSION "3.49.1" +#define SQLITE_VERSION_NUMBER 3049001 +#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -1164,12 +1164,6 @@ struct sqlite3_io_methods { ** the value that M is to be set to. Before returning, the 32-bit signed ** integer is overwritten with the previous value of M. ** -**
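Since this file pins the vendored header back to 3.49.1, a mismatch between the header and the library actually linked becomes possible if the two ever diverge. A small sanity check using the documented version APIs (a sketch, not part of the vendored code):

#include <stdio.h>
#include <sqlite3.h>

int main(void){
  /* SQLITE_VERSION* come from the vendored header at compile time;
  ** the sqlite3_libversion* calls report the linked library. */
  if( sqlite3_libversion_number()!=SQLITE_VERSION_NUMBER ){
    fprintf(stderr, "header %s vs library %s\n",
            SQLITE_VERSION, sqlite3_libversion());
    return 1;
  }
  printf("sqlite %s (%s)\n", sqlite3_libversion(), sqlite3_sourceid());
  return 0;
}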
        • [[SQLITE_FCNTL_BLOCK_ON_CONNECT]] -** The [SQLITE_FCNTL_BLOCK_ON_CONNECT] opcode is used to configure the -** VFS to block when taking a SHARED lock to connect to a wal mode database. -** This is used to implement the functionality associated with -** SQLITE_SETLK_BLOCK_ON_CONNECT. -** **
        • [[SQLITE_FCNTL_DATA_VERSION]] ** The [SQLITE_FCNTL_DATA_VERSION] opcode is used to detect changes to ** a database file. The argument is a pointer to a 32-bit unsigned integer. @@ -1266,7 +1260,6 @@ struct sqlite3_io_methods { #define SQLITE_FCNTL_CKSM_FILE 41 #define SQLITE_FCNTL_RESET_CACHE 42 #define SQLITE_FCNTL_NULL_IO 43 -#define SQLITE_FCNTL_BLOCK_ON_CONNECT 44 /* deprecated names */ #define SQLITE_GET_LOCKPROXYFILE SQLITE_FCNTL_GET_LOCKPROXYFILE @@ -1997,16 +1990,13 @@ struct sqlite3_mem_methods { ** ** [[SQLITE_CONFIG_LOOKASIDE]]
          SQLITE_CONFIG_LOOKASIDE
          **
          ^(The SQLITE_CONFIG_LOOKASIDE option takes two arguments that determine -** the default size of [lookaside memory] on each [database connection]. +** the default size of lookaside memory on each [database connection]. ** The first argument is the -** size of each lookaside buffer slot ("sz") and the second is the number of -** slots allocated to each database connection ("cnt").)^ -** ^(SQLITE_CONFIG_LOOKASIDE sets the default lookaside size. -** The [SQLITE_DBCONFIG_LOOKASIDE] option to [sqlite3_db_config()] can -** be used to change the lookaside configuration on individual connections.)^ -** The [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to change the -** default lookaside configuration at compile-time. -**
          +** size of each lookaside buffer slot and the second is the number of +** slots allocated to each database connection.)^ ^(SQLITE_CONFIG_LOOKASIDE +** sets the default lookaside size. The [SQLITE_DBCONFIG_LOOKASIDE] +** option to [sqlite3_db_config()] can be used to change the lookaside +** configuration on individual connections.)^
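As a concrete illustration of the two-argument form described above, a process-wide lookaside default can be set before SQLite is initialized (a minimal sketch; error handling reduced to a message):

#include <stdio.h>
#include <sqlite3.h>

int main(void){
  /* Must run while no connections are open and before initialization;
  ** otherwise sqlite3_config() returns SQLITE_MISUSE. */
  int rc = sqlite3_config(SQLITE_CONFIG_LOOKASIDE, 1200, 40);
  if( rc!=SQLITE_OK ){
    fprintf(stderr, "lookaside config rejected: %d\n", rc);
    return 1;
  }
  sqlite3_initialize();
  /* ... connections opened now default to 40 slots of 1200 bytes ... */
  sqlite3_shutdown();
  return 0;
}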
        • ** ** [[SQLITE_CONFIG_PCACHE2]]
          SQLITE_CONFIG_PCACHE2
          **
          ^(The SQLITE_CONFIG_PCACHE2 option takes a single argument which is @@ -2243,50 +2233,31 @@ struct sqlite3_mem_methods { ** [[SQLITE_DBCONFIG_LOOKASIDE]] **
          SQLITE_DBCONFIG_LOOKASIDE
          **
          The SQLITE_DBCONFIG_LOOKASIDE option is used to adjust the -** configuration of the [lookaside memory allocator] within a database +** configuration of the lookaside memory allocator within a database ** connection. ** The arguments to the SQLITE_DBCONFIG_LOOKASIDE option are not ** in the [DBCONFIG arguments|usual format]. ** The SQLITE_DBCONFIG_LOOKASIDE option takes three arguments, not two, ** so that a call to [sqlite3_db_config()] that uses SQLITE_DBCONFIG_LOOKASIDE ** should have a total of five parameters. -**
            -**
          1. The first argument ("buf") is a +** ^The first argument (the third parameter to [sqlite3_db_config()] is a ** pointer to a memory buffer to use for lookaside memory. -** The first argument may be NULL in which case SQLite will allocate the -** lookaside buffer itself using [sqlite3_malloc()]. -**

          2. The second argument ("sz") is the -** size of each lookaside buffer slot. Lookaside is disabled if "sz" -** is less than 8. The "sz" argument should be a multiple of 8 less than -** 65536. If "sz" does not meet this constraint, it is reduced in size until -** it does. -**

          3. The third argument ("cnt") is the number of slots. Lookaside is disabled -** if "cnt"is less than 1. The "cnt" value will be reduced, if necessary, so -** that the product of "sz" and "cnt" does not exceed 2,147,418,112. The "cnt" -** parameter is usually chosen so that the product of "sz" and "cnt" is less -** than 1,000,000. -**

          -**

          If the "buf" argument is not NULL, then it must -** point to a memory buffer with a size that is greater than -** or equal to the product of "sz" and "cnt". -** The buffer must be aligned to an 8-byte boundary. -** The lookaside memory +** ^The first argument after the SQLITE_DBCONFIG_LOOKASIDE verb +** may be NULL in which case SQLite will allocate the +** lookaside buffer itself using [sqlite3_malloc()]. ^The second argument is the +** size of each lookaside buffer slot. ^The third argument is the number of +** slots. The size of the buffer in the first argument must be greater than +** or equal to the product of the second and third arguments. The buffer +** must be aligned to an 8-byte boundary. ^If the second argument to +** SQLITE_DBCONFIG_LOOKASIDE is not a multiple of 8, it is internally +** rounded down to the next smaller multiple of 8. ^(The lookaside memory ** configuration for a database connection can only be changed when that ** connection is not currently using lookaside memory, or in other words -** when the value returned by [SQLITE_DBSTATUS_LOOKASIDE_USED] is zero. +** when the "current value" returned by +** [sqlite3_db_status](D,[SQLITE_DBSTATUS_LOOKASIDE_USED],...) is zero. ** Any attempt to change the lookaside memory configuration when lookaside ** memory is in use leaves the configuration unchanged and returns -** [SQLITE_BUSY]. -** If the "buf" argument is NULL and an attempt -** to allocate memory based on "sz" and "cnt" fails, then -** lookaside is silently disabled. -**

          -** The [SQLITE_CONFIG_LOOKASIDE] configuration option can be used to set the -** default lookaside configuration at initialization. The -** [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to set the default lookaside -** configuration at compile-time. Typical values for lookaside are 1200 for -** "sz" and 40 to 100 for "cnt". -**

          +** [SQLITE_BUSY].)^ ** ** [[SQLITE_DBCONFIG_ENABLE_FKEY]] **
          SQLITE_DBCONFIG_ENABLE_FKEY
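For the per-connection SQLITE_DBCONFIG_LOOKASIDE call described above, the three extra arguments line up with the buffer/size/count description. A sketch (in-memory database; NULL buffer so SQLite allocates the pool itself):

#include <stdio.h>
#include <sqlite3.h>

int main(void){
  sqlite3 *db;
  int rc;
  if( sqlite3_open(":memory:", &db)!=SQLITE_OK ) return 1;

  /* buf=NULL => SQLite allocates; sz=1024 (multiple of 8); cnt=64 slots.
  ** Only succeeds while the connection is not using lookaside memory. */
  rc = sqlite3_db_config(db, SQLITE_DBCONFIG_LOOKASIDE, (void*)0, 1024, 64);
  if( rc!=SQLITE_OK ){
    fprintf(stderr, "SQLITE_DBCONFIG_LOOKASIDE failed: %d\n", rc);
  }
  sqlite3_close(db);
  return 0;
}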
          @@ -3023,44 +2994,6 @@ SQLITE_API int sqlite3_busy_handler(sqlite3*,int(*)(void*,int),void*); */ SQLITE_API int sqlite3_busy_timeout(sqlite3*, int ms); -/* -** CAPI3REF: Set the Setlk Timeout -** METHOD: sqlite3 -** -** This routine is only useful in SQLITE_ENABLE_SETLK_TIMEOUT builds. If -** the VFS supports blocking locks, it sets the timeout in ms used by -** eligible locks taken on wal mode databases by the specified database -** handle. In non-SQLITE_ENABLE_SETLK_TIMEOUT builds, or if the VFS does -** not support blocking locks, this function is a no-op. -** -** Passing 0 to this function disables blocking locks altogether. Passing -** -1 to this function requests that the VFS blocks for a long time - -** indefinitely if possible. The results of passing any other negative value -** are undefined. -** -** Internally, each SQLite database handle store two timeout values - the -** busy-timeout (used for rollback mode databases, or if the VFS does not -** support blocking locks) and the setlk-timeout (used for blocking locks -** on wal-mode databases). The sqlite3_busy_timeout() method sets both -** values, this function sets only the setlk-timeout value. Therefore, -** to configure separate busy-timeout and setlk-timeout values for a single -** database handle, call sqlite3_busy_timeout() followed by this function. -** -** Whenever the number of connections to a wal mode database falls from -** 1 to 0, the last connection takes an exclusive lock on the database, -** then checkpoints and deletes the wal file. While it is doing this, any -** new connection that tries to read from the database fails with an -** SQLITE_BUSY error. Or, if the SQLITE_SETLK_BLOCK_ON_CONNECT flag is -** passed to this API, the new connection blocks until the exclusive lock -** has been released. -*/ -SQLITE_API int sqlite3_setlk_timeout(sqlite3*, int ms, int flags); - -/* -** CAPI3REF: Flags for sqlite3_setlk_timeout() -*/ -#define SQLITE_SETLK_BLOCK_ON_CONNECT 0x01 - /* ** CAPI3REF: Convenience Routines For Running Queries ** METHOD: sqlite3 @@ -4080,7 +4013,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** an array P of N URI Key/Value pairs. The result from +** with N URI parameters key/values pairs in the array P. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
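The hunk above also removes sqlite3_setlk_timeout(), which only exists from 3.50 onward; against the 3.49.1 header the available tools are sqlite3_busy_timeout() and sqlite3_busy_handler(). A sketch of both (the file name test.db is hypothetical; the handler arbitrarily retries ten times):

#include <stdio.h>
#include <sqlite3.h>

/* Custom busy handler: back off 50 ms per attempt, give up after 10. */
static int busyCb(void *pArg, int nPrior){
  (void)pArg;
  if( nPrior>=10 ) return 0;   /* returning 0 surfaces SQLITE_BUSY */
  sqlite3_sleep(50);
  return 1;                    /* non-zero => retry the lock */
}

int main(void){
  sqlite3 *db;
  if( sqlite3_open("test.db", &db)!=SQLITE_OK ) return 1;

  /* Either a fixed timeout... */
  sqlite3_busy_timeout(db, 5000);       /* 5 seconds */
  /* ...or a callback, which replaces the timeout set above. */
  sqlite3_busy_handler(db, busyCb, 0);

  sqlite3_close(db);
  return 0;
}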
            @@ -4761,7 +4694,7 @@ typedef struct sqlite3_context sqlite3_context; ** METHOD: sqlite3_stmt ** ** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants, -** literals may be replaced by a [parameter] that matches one of the following +** literals may be replaced by a [parameter] that matches one of following ** templates: ** **
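The parameter templates mentioned above (?, ?NNN, :AAA, @AAA, $AAA) are bound either by position or by first resolving the name to an index. A short sketch using ?1 and :name (table t is hypothetical):

#include <stdio.h>
#include <sqlite3.h>

int main(void){
  sqlite3 *db;
  sqlite3_stmt *pStmt;
  if( sqlite3_open(":memory:", &db)!=SQLITE_OK ) return 1;
  sqlite3_exec(db, "CREATE TABLE t(a,b)", 0, 0, 0);

  if( sqlite3_prepare_v2(db, "INSERT INTO t VALUES(?1, :name)", -1,
                         &pStmt, 0)==SQLITE_OK ){
    sqlite3_bind_int(pStmt, 1, 42);             /* positional: ?1 */
    /* Named parameters are resolved to an index before binding. */
    sqlite3_bind_text(pStmt,
        sqlite3_bind_parameter_index(pStmt, ":name"),
        "hello", -1, SQLITE_STATIC);
    sqlite3_step(pStmt);
    sqlite3_finalize(pStmt);
  }
  sqlite3_close(db);
  return 0;
}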
              @@ -4806,7 +4739,7 @@ typedef struct sqlite3_context sqlite3_context; ** ** [[byte-order determination rules]] ^The byte-order of ** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF) -** found in the first character, which is removed, or in the absence of a BOM +** found in first character, which is removed, or in the absence of a BOM ** the byte order is the native byte order of the host ** machine for sqlite3_bind_text16() or the byte order specified in ** the 6th parameter for sqlite3_bind_text64().)^ @@ -4826,7 +4759,7 @@ typedef struct sqlite3_context sqlite3_context; ** or sqlite3_bind_text16() or sqlite3_bind_text64() then ** that parameter must be the byte offset ** where the NUL terminator would occur assuming the string were NUL -** terminated. If any NUL characters occur at byte offsets less than +** terminated. If any NUL characters occurs at byte offsets less than ** the value of the fourth parameter then the resulting string value will ** contain embedded NULs. The result of expressions involving strings ** with embedded NULs is undefined. @@ -5038,7 +4971,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N); ** METHOD: sqlite3_stmt ** ** ^These routines provide a means to determine the database, table, and -** table column that is the origin of a particular result column in a +** table column that is the origin of a particular result column in ** [SELECT] statement. ** ^The name of the database or table or column can be returned as ** either a UTF-8 or UTF-16 string. ^The _database_ routines return @@ -5176,7 +5109,7 @@ SQLITE_API const void *sqlite3_column_decltype16(sqlite3_stmt*,int); ** other than [SQLITE_ROW] before any subsequent invocation of ** sqlite3_step(). Failure to reset the prepared statement using ** [sqlite3_reset()] would result in an [SQLITE_MISUSE] return from -** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1]), +** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1], ** sqlite3_step() began ** calling [sqlite3_reset()] automatically in this circumstance rather ** than returning [SQLITE_MISUSE]. This is not considered a compatibility @@ -5607,8 +5540,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** ** For best security, the [SQLITE_DIRECTONLY] flag is recommended for ** all application-defined SQL functions that do not need to be -** used inside of triggers, views, CHECK constraints, or other elements of -** the database schema. This flag is especially recommended for SQL +** used inside of triggers, view, CHECK constraints, or other elements of +** the database schema. This flags is especially recommended for SQL ** functions that have side effects or reveal internal application state. ** Without this flag, an attacker might be able to modify the schema of ** a database file to include invocations of the function with parameters @@ -5639,7 +5572,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** [user-defined window functions|available here]. ** ** ^(If the final parameter to sqlite3_create_function_v2() or -** sqlite3_create_window_function() is not NULL, then it is the destructor for +** sqlite3_create_window_function() is not NULL, then it is destructor for ** the application data pointer. 
The destructor is invoked when the function ** is deleted, either by being overloaded or when the database connection ** closes.)^ ^The destructor is also invoked if the call to @@ -6039,7 +5972,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*); ** METHOD: sqlite3_value ** ** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value] -** object V and returns a pointer to that copy. ^The [sqlite3_value] returned +** object D and returns a pointer to that copy. ^The [sqlite3_value] returned ** is a [protected sqlite3_value] object even if the input is not. ** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a ** memory allocation fails. ^If V is a [pointer value], then the result @@ -6077,7 +6010,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*); ** allocation error occurs. ** ** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is -** determined by the N parameter on the first successful call. Changing the +** determined by the N parameter on first successful call. Changing the ** value of N in any subsequent call to sqlite3_aggregate_context() within ** the same aggregate function instance will not resize the memory ** allocation.)^ Within the xFinal callback, it is customary to set @@ -6239,7 +6172,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi ** ** Security Warning: These interfaces should not be exposed in scripting ** languages or in other circumstances where it might be possible for an -** attacker to invoke them. Any agent that can invoke these interfaces +** an attacker to invoke them. Any agent that can invoke these interfaces ** can probably also take control of the process. ** ** Database connection client data is only available for SQLite @@ -6353,7 +6286,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** pointed to by the 2nd parameter are taken as the application-defined ** function result. If the 3rd parameter is non-negative, then it ** must be the byte offset into the string where the NUL terminator would -** appear if the string were NUL terminated. If any NUL characters occur +** appear if the string where NUL terminated. If any NUL characters occur ** in the string at a byte offset that is less than the value of the 3rd ** parameter, then the resulting string will contain embedded NULs and the ** result of expressions operating on strings with embedded NULs is undefined. @@ -6411,7 +6344,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** string and preferably a string literal. The sqlite3_result_pointer() ** routine is part of the [pointer passing interface] added for SQLite 3.20.0. ** -** If these routines are called from within a different thread +** If these routines are called from within the different thread ** than the one containing the application-defined function that received ** the [sqlite3_context] pointer, the results are undefined. */ @@ -6817,7 +6750,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name -** for the N-th database on database connection D, or a NULL pointer if N is +** for the N-th database on database connection D, or a NULL pointer of N is ** out of range. An N value of 0 means the main database file. An N of 1 is ** the "temp" schema. Larger values of N correspond to various ATTACH-ed ** databases. @@ -6912,7 +6845,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
              The SQLITE_TXN_READ state means that the database is currently ** in a read transaction. Content has been read from the database file ** but nothing in the database file has changed. The transaction state -** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are +** will advanced to SQLITE_TXN_WRITE if any changes occur and there are ** no other conflicting concurrent write transactions. The transaction ** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or ** [COMMIT].
              @@ -6921,7 +6854,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
              The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
              +** to SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT]. */ #define SQLITE_TXN_NONE 0 #define SQLITE_TXN_READ 1 @@ -7072,8 +7005,6 @@ SQLITE_API int sqlite3_autovacuum_pages( ** ** ^The second argument is a pointer to the function to invoke when a ** row is updated, inserted or deleted in a rowid table. -** ^The update hook is disabled by invoking sqlite3_update_hook() -** with a NULL pointer as the second parameter. ** ^The first argument to the callback is a copy of the third argument ** to sqlite3_update_hook(). ** ^The second callback argument is one of [SQLITE_INSERT], [SQLITE_DELETE], @@ -7202,7 +7133,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); ** CAPI3REF: Impose A Limit On Heap Size ** ** These interfaces impose limits on the amount of heap memory that will be -** used by all database connections within a single process. +** by all database connections within a single process. ** ** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the ** soft limit on the amount of heap memory that may be allocated by SQLite. @@ -7260,7 +7191,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); **
            )^ ** ** The circumstances under which SQLite will enforce the heap limits may -** change in future releases of SQLite. +** changes in future releases of SQLite. */ SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N); SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N); @@ -7375,8 +7306,8 @@ SQLITE_API int sqlite3_table_column_metadata( ** ^The entry point is zProc. ** ^(zProc may be 0, in which case SQLite will try to come up with an ** entry point name on its own. It first tries "sqlite3_extension_init". -** If that does not work, it constructs a name "sqlite3_X_init" where -** X consists of the lower-case equivalent of all ASCII alphabetic +** If that does not work, it constructs a name "sqlite3_X_init" where the +** X is consists of the lower-case equivalent of all ASCII alphabetic ** characters in the filename from the last "/" to the first following ** "." and omitting any initial "lib".)^ ** ^The sqlite3_load_extension() interface returns @@ -7447,7 +7378,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff); ** ^(Even though the function prototype shows that xEntryPoint() takes ** no arguments and returns void, SQLite invokes xEntryPoint() with three ** arguments and expects an integer result as if the signature of the -** entry point were as follows: +** entry point where as follows: ** **
             **    int xEntryPoint(
            @@ -7611,7 +7542,7 @@ struct sqlite3_module {
             ** virtual table and might not be checked again by the byte code.)^ ^(The
             ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
             ** is left in its default setting of false, the constraint will always be
            -** checked separately in byte code.  If the omit flag is changed to true, then
            +** checked separately in byte code.  If the omit flag is change to true, then
             ** the constraint may or may not be checked in byte code.  In other words,
             ** when the omit flag is true there is no guarantee that the constraint will
             ** not be checked again using byte code.)^
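A hedged sketch of how an xBestIndex implementation typically applies the argvIndex/omit fields discussed above, assuming a hypothetical virtual table whose column 0 supports exact equality matches (whether omit may safely be set depends on the table returning exact matches):

#include <sqlite3.h>

static int demoBestIndex(sqlite3_vtab *pVTab, sqlite3_index_info *pInfo){
  int i;
  (void)pVTab;
  for(i=0; i<pInfo->nConstraint; i++){
    const struct sqlite3_index_constraint *pC = &pInfo->aConstraint[i];
    if( pC->usable && pC->iColumn==0 && pC->op==SQLITE_INDEX_CONSTRAINT_EQ ){
      pInfo->aConstraintUsage[i].argvIndex = 1; /* pass value to xFilter */
      pInfo->aConstraintUsage[i].omit = 1;      /* skip the bytecode re-check */
      pInfo->idxNum = 1;
      pInfo->estimatedCost = 10.0;
      return SQLITE_OK;
    }
  }
  pInfo->idxNum = 0;                  /* no usable constraint: full scan */
  pInfo->estimatedCost = 1000000.0;
  return SQLITE_OK;
}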
            @@ -7637,7 +7568,7 @@ struct sqlite3_module {
             ** The xBestIndex method may optionally populate the idxFlags field with a
             ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
             ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
            -** output to show the idxNum as hex instead of as decimal.  Another flag is
            +** output to show the idxNum has hex instead of as decimal.  Another flag is
             ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
             ** return at most one row.
             **
            @@ -7778,7 +7709,7 @@ struct sqlite3_index_info {
             ** the implementation of the [virtual table module].   ^The fourth
             ** parameter is an arbitrary client data pointer that is passed through
             ** into the [xCreate] and [xConnect] methods of the virtual table module
            -** when a new virtual table is being created or reinitialized.
            +** when a new virtual table is be being created or reinitialized.
             **
             ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
             ** is a pointer to a destructor for the pClientData.  ^SQLite will
            @@ -7943,7 +7874,7 @@ typedef struct sqlite3_blob sqlite3_blob;
             ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
             ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
             ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
            -** on *ppBlob after this function returns.
            +** on *ppBlob after this function it returns.
             **
             ** This function fails with SQLITE_ERROR if any of the following are true:
             ** 
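A sketch of the read path this hunk documents, assuming a hypothetical table t1 with a BLOB column named data; note that the close call is safe even when the open failed, as described above:

#include <stdio.h>
#include <sqlite3.h>

static int dumpBlob(sqlite3 *db, sqlite3_int64 rowid){
  sqlite3_blob *pBlob = 0;
  char buf[64];
  /* "main" schema, flags=0 => read-only handle. */
  int rc = sqlite3_blob_open(db, "main", "t1", "data", rowid, 0, &pBlob);
  if( rc==SQLITE_OK ){
    int n = sqlite3_blob_bytes(pBlob);   /* total size; cannot be changed */
    int nRead = n < (int)sizeof(buf) ? n : (int)sizeof(buf);
    rc = sqlite3_blob_read(pBlob, buf, nRead, 0);
    if( rc==SQLITE_OK ) printf("read %d of %d bytes\n", nRead, n);
  }
  /* Harmless no-op if pBlob is still NULL after a failed open. */
  sqlite3_blob_close(pBlob);
  return rc;
}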
              @@ -8063,7 +7994,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *); ** ** ^Returns the size in bytes of the BLOB accessible via the ** successfully opened [BLOB handle] in its only argument. ^The -** incremental blob I/O routines can only read or overwrite existing +** incremental blob I/O routines can only read or overwriting existing ** blob content; they cannot change the size of a blob. ** ** This routine only works on a [BLOB handle] which has been created @@ -8213,7 +8144,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*); ** ^The sqlite3_mutex_alloc() routine allocates a new ** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc() ** routine returns NULL if it is unable to allocate the requested -** mutex. The argument to sqlite3_mutex_alloc() must be one of these +** mutex. The argument to sqlite3_mutex_alloc() must one of these ** integer constants: ** **
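The mutex allocation API at the end of the previous hunk can be exercised on its own; a minimal sketch using SQLITE_MUTEX_FAST, the non-recursive variant:

#include <sqlite3.h>

int main(void){
  /* FAST mutexes need not support recursive entry; use for plain guards. */
  sqlite3_mutex *p = sqlite3_mutex_alloc(SQLITE_MUTEX_FAST);
  if( p ){                 /* NULL when the build cannot supply a mutex */
    sqlite3_mutex_enter(p);
    /* ... critical section ... */
    sqlite3_mutex_leave(p);
    sqlite3_mutex_free(p);
  }
  return 0;
}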
                @@ -8446,7 +8377,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*); ** CAPI3REF: Retrieve the mutex for a database connection ** METHOD: sqlite3 ** -** ^This interface returns a pointer to the [sqlite3_mutex] object that +** ^This interface returns a pointer the [sqlite3_mutex] object that ** serializes access to the [database connection] given in the argument ** when the [threading mode] is Serialized. ** ^If the [threading mode] is Single-thread or Multi-thread then this @@ -8569,7 +8500,7 @@ SQLITE_API int sqlite3_test_control(int op, ...); ** CAPI3REF: SQL Keyword Checking ** ** These routines provide access to the set of SQL language keywords -** recognized by SQLite. Applications can use these routines to determine +** recognized by SQLite. Applications can uses these routines to determine ** whether or not a specific identifier needs to be escaped (for example, ** by enclosing in double-quotes) so as not to confuse the parser. ** @@ -8737,7 +8668,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*); ** content of the dynamic string under construction in X. The value ** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X ** and might be freed or altered by any subsequent method on the same -** [sqlite3_str] object. Applications must not use the pointer returned by +** [sqlite3_str] object. Applications must not used the pointer returned ** [sqlite3_str_value(X)] after any subsequent method call on the same ** object. ^Applications may change the content of the string returned ** by [sqlite3_str_value(X)] as long as they do not write into any bytes @@ -8823,7 +8754,7 @@ SQLITE_API int sqlite3_status64( ** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE] ** buffer and where forced to overflow to [sqlite3_malloc()]. The ** returned value includes allocations that overflowed because they -** were too large (they were larger than the "sz" parameter to +** where too large (they were larger than the "sz" parameter to ** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because ** no space was left in the page cache.)^ ** @@ -8907,29 +8838,28 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
                SQLITE_DBSTATUS_LOOKASIDE_HIT
                **
                This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.
                )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
                SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
                -**
                This parameter returns the number of malloc attempts that might have +**
                This parameter returns the number malloc attempts that might have ** been satisfied using lookaside memory but failed due to the amount of ** memory requested being larger than the lookaside slot size. ** Only the high-water value is meaningful; -** the current value is always zero.
                )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
                SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
                -**
                This parameter returns the number of malloc attempts that might have +**
                This parameter returns the number malloc attempts that might have ** been satisfied using lookaside memory but failed due to all lookaside ** memory already being in use. ** Only the high-water value is meaningful; -** the current value is always zero.
                )^ +** the current value is always zero.)^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
                SQLITE_DBSTATUS_CACHE_USED
                **
                This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. -**
                ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
                SQLITE_DBSTATUS_CACHE_USED_SHARED
                @@ -8938,10 +8868,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** memory used by that pager cache is divided evenly between the attached ** connections.)^ In other words, if none of the pager caches associated ** with the database connection are shared, this request returns the same -** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are +** value as DBSTATUS_CACHE_USED. Or, if one or more or the pager caches are ** shared, the value returned by this call will be smaller than that returned ** by DBSTATUS_CACHE_USED. ^The highwater mark associated with -** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. +** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. ** ** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
                SQLITE_DBSTATUS_SCHEMA_USED
                **
                This parameter returns the approximate number of bytes of heap @@ -8951,7 +8881,6 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. -**
                ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
                SQLITE_DBSTATUS_STMT_USED
                **
                This parameter returns the approximate number of bytes of heap @@ -8988,7 +8917,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** been written to disk in the middle of a transaction due to the page ** cache overflowing. Transactions are more efficient if they are written ** to disk all at once. When pages spill mid-transaction, that introduces -** additional overhead. This parameter can be used to help identify +** additional overhead. This parameter can be used help identify ** inefficiencies that can be resolved by increasing the cache size. **
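A sketch of querying two of the sqlite3_db_status() parameters documented above; for the lookaside counters only the high-water value carries information:

#include <stdio.h>
#include <sqlite3.h>

static void printDbStats(sqlite3 *db){
  int cur = 0, hiwtr = 0;

  /* Heap used by this connection's pager caches (current value). */
  if( sqlite3_db_status(db, SQLITE_DBSTATUS_CACHE_USED,
                        &cur, &hiwtr, 0)==SQLITE_OK ){
    printf("cache used: %d bytes\n", cur);
  }
  /* Lookaside hits: current value is always zero, read the high-water. */
  if( sqlite3_db_status(db, SQLITE_DBSTATUS_LOOKASIDE_HIT,
                        &cur, &hiwtr, 0)==SQLITE_OK ){
    printf("lookaside hits (high-water): %d\n", hiwtr);
  }
}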
                ** @@ -9059,13 +8988,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
                SQLITE_STMTSTATUS_SORT
                **
                ^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improve performance through careful use of indices.
                +** improvement performance through careful use of indices. ** ** [[SQLITE_STMTSTATUS_AUTOINDEX]]
                SQLITE_STMTSTATUS_AUTOINDEX
                **
                ^This is the number of rows inserted into transient indices that ** were created automatically in order to help joins run faster. ** A non-zero value in this counter may indicate an opportunity to -** improve performance by adding permanent indices that do not +** improvement performance by adding permanent indices that do not ** need to be reinitialized each time the statement is run.
                ** ** [[SQLITE_STMTSTATUS_VM_STEP]]
                SQLITE_STMTSTATUS_VM_STEP
                @@ -9074,19 +9003,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
                SQLITE_STMTSTATUS_REPREPARE
                **
                ^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan.
                +** [bound parameters] that might affect the query plan. ** ** [[SQLITE_STMTSTATUS_RUN]]
                SQLITE_STMTSTATUS_RUN
                **
                ^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle.
                +** cycle. ** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9096,7 +9025,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
                SQLITE_STMTSTATUS_MEMUSED
                **
                ^This is the approximate number of bytes of heap memory @@ -9201,9 +9130,9 @@ struct sqlite3_pcache_page { ** SQLite will typically create one cache instance for each open database file, ** though this is not guaranteed. ^The ** first parameter, szPage, is the size in bytes of the pages that must -** be allocated by the cache. ^szPage will always be a power of two. ^The +** be allocated by the cache. ^szPage will always a power of two. ^The ** second parameter szExtra is a number of bytes of extra storage -** associated with each page cache entry. ^The szExtra parameter will be +** associated with each page cache entry. ^The szExtra parameter will ** a number less than 250. SQLite will use the ** extra szExtra bytes on each page to store metadata about the underlying ** database page on disk. The value passed into szExtra depends @@ -9211,17 +9140,17 @@ struct sqlite3_pcache_page { ** ^The third argument to xCreate(), bPurgeable, is true if the cache being ** created will be used to cache database pages of a file stored on disk, or ** false if it is used for an in-memory database. The cache implementation -** does not have to do anything special based upon the value of bPurgeable; +** does not have to do anything special based with the value of bPurgeable; ** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will ** never invoke xUnpin() except to deliberately delete a page. ** ^In other words, calls to xUnpin() on a cache with bPurgeable set to ** false will always have the "discard" flag set to true. -** ^Hence, a cache created with bPurgeable set to false will +** ^Hence, a cache created with bPurgeable false will ** never contain any unpinned pages. ** ** [[the xCachesize() page cache method]] ** ^(The xCachesize() method may be called at any time by SQLite to set the -** suggested maximum cache-size (number of pages stored) for the cache +** suggested maximum cache-size (number of pages stored by) the cache ** instance passed as the first argument. This is the value configured using ** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable ** parameter, the implementation is not required to do anything with this @@ -9248,12 +9177,12 @@ struct sqlite3_pcache_page { ** implementation must return a pointer to the page buffer with its content ** intact. If the requested page is not already in the cache, then the ** cache implementation should use the value of the createFlag -** parameter to help it determine what action to take: +** parameter to help it determined what action to take: ** ** **
                createFlag Behavior when page is not already in cache **
                0 Do not allocate a new page. Return NULL. -**
                1 Allocate a new page if it is easy and convenient to do so. +**
                1 Allocate a new page if it easy and convenient to do so. ** Otherwise return NULL. **
                2 Make every effort to allocate a new page. Only return ** NULL if allocating a new page is effectively impossible. @@ -9270,7 +9199,7 @@ struct sqlite3_pcache_page { ** as its second argument. If the third parameter, discard, is non-zero, ** then the page must be evicted from the cache. ** ^If the discard parameter is -** zero, then the page may be discarded or retained at the discretion of the +** zero, then the page may be discarded or retained at the discretion of ** page cache implementation. ^The page cache implementation ** may choose to evict unpinned pages at any time. ** @@ -9288,7 +9217,7 @@ struct sqlite3_pcache_page { ** When SQLite calls the xTruncate() method, the cache must discard all ** existing cache entries with page numbers (keys) greater than or equal ** to the value of the iLimit parameter passed to xTruncate(). If any -** of these pages are pinned, they become implicitly unpinned, meaning that +** of these pages are pinned, they are implicitly unpinned, meaning that ** they can be safely discarded. ** ** [[the xDestroy() page cache method]] @@ -9468,7 +9397,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** external process or via a database connection other than the one being ** used by the backup operation, then the backup will be automatically ** restarted by the next call to sqlite3_backup_step(). ^If the source -** database is modified by using the same database connection as is used +** database is modified by the using the same database connection as is used ** by the backup operation, then the backup database is automatically ** updated at the same time. ** @@ -9485,7 +9414,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** and may not be used following a call to sqlite3_backup_finish(). ** ** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no -** sqlite3_backup_step() errors occurred, regardless of whether or not +** sqlite3_backup_step() errors occurred, regardless or whether or not ** sqlite3_backup_step() completed. ** ^If an out-of-memory condition or IO error occurred during any prior ** sqlite3_backup_step() call on the same [sqlite3_backup] object, then @@ -9587,7 +9516,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** application receives an SQLITE_LOCKED error, it may call the ** sqlite3_unlock_notify() method with the blocked connection handle as ** the first argument to register for a callback that will be invoked -** when the blocking connection's current transaction is concluded. ^The +** when the blocking connections current transaction is concluded. ^The ** callback is invoked from within the [sqlite3_step] or [sqlite3_close] ** call that concludes the blocking connection's transaction. ** @@ -9607,7 +9536,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** blocked connection already has a registered unlock-notify callback, ** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is ** called with a NULL pointer as its second argument, then any existing -** unlock-notify callback is canceled. ^The blocked connection's +** unlock-notify callback is canceled. ^The blocked connections ** unlock-notify callback may also be canceled by closing the blocked ** connection using [sqlite3_close()]. ** @@ -10005,7 +9934,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** support constraints. 
In this configuration (which is the default) if ** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire ** statement is rolled back as if [ON CONFLICT | OR ABORT] had been -** specified as part of the user's SQL statement, regardless of the actual +** specified as part of the users SQL statement, regardless of the actual ** ON CONFLICT mode specified. ** ** If X is non-zero, then the virtual table implementation guarantees @@ -10039,7 +9968,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** [[SQLITE_VTAB_INNOCUOUS]]
                SQLITE_VTAB_INNOCUOUS
                **
                Calls of the form ** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the -** [xConnect] or [xCreate] methods of a [virtual table] implementation +** the [xConnect] or [xCreate] methods of a [virtual table] implementation ** identify that virtual table as being safe to use from within triggers ** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the ** virtual table can do no serious harm even if it is controlled by a @@ -10207,7 +10136,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); **
                ** ** ^For the purposes of comparing virtual table output values to see if the -** values are the same value for sorting purposes, two NULL values are considered +** values are same value for sorting purposes, two NULL values are considered ** to be the same. In other words, the comparison operator is "IS" ** (or "IS NOT DISTINCT FROM") and not "==". ** @@ -10217,7 +10146,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); ** ** ^A virtual table implementation is always free to return rows in any order ** it wants, as long as the "orderByConsumed" flag is not set. ^When the -** "orderByConsumed" flag is unset, the query planner will add extra +** the "orderByConsumed" flag is unset, the query planner will add extra ** [bytecode] to ensure that the final results returned by the SQL query are ** ordered correctly. The use of the "orderByConsumed" flag and the ** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful @@ -10314,7 +10243,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle); ** sqlite3_vtab_in_next(X,P) should be one of the parameters to the ** xFilter method which invokes these routines, and specifically ** a parameter that was previously selected for all-at-once IN constraint -** processing using the [sqlite3_vtab_in()] interface in the +** processing use the [sqlite3_vtab_in()] interface in the ** [xBestIndex|xBestIndex method]. ^(If the X parameter is not ** an xFilter argument that was selected for all-at-once IN constraint ** processing, then these routines return [SQLITE_ERROR].)^ @@ -10369,7 +10298,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut); ** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V) ** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th ** constraint is not available. ^The sqlite3_vtab_rhs_value() interface -** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if +** can return an result code other than SQLITE_OK or SQLITE_NOTFOUND if ** something goes wrong. ** ** The sqlite3_vtab_rhs_value() interface is usually only successful if @@ -10397,8 +10326,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** KEYWORDS: {conflict resolution mode} ** ** These constants are returned by [sqlite3_vtab_on_conflict()] to -** inform a [virtual table] implementation of the [ON CONFLICT] mode -** for the SQL statement being evaluated. +** inform a [virtual table] implementation what the [ON CONFLICT] mode +** is for the SQL statement being evaluated. ** ** Note that the [SQLITE_IGNORE] constant is also used as a potential ** return value from the [sqlite3_set_authorizer()] callback and that @@ -10438,39 +10367,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** [[SQLITE_SCANSTAT_EST]]
                SQLITE_SCANSTAT_EST
                **
                ^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimate was accurate, +** iteration of the X-th loop. If the query planner's estimates was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop.
                +** be the NLOOP value for the current loop. ** ** [[SQLITE_SCANSTAT_NAME]]
                SQLITE_SCANSTAT_NAME
                **
                ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop.
                +** used for the X-th loop. ** ** [[SQLITE_SCANSTAT_EXPLAIN]]
                SQLITE_SCANSTAT_EXPLAIN
                **
                ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop.
                +** description for the X-th loop. ** ** [[SQLITE_SCANSTAT_SELECTID]]
                SQLITE_SCANSTAT_SELECTID
                **
                ^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query.
                +** column of an [EXPLAIN QUERY PLAN] query. ** ** [[SQLITE_SCANSTAT_PARENTID]]
                SQLITE_SCANSTAT_PARENTID
                **
                The "int" variable pointed to by the V parameter will be set to the -** id of the parent of the current query element, if applicable, or +** the id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query.
                +** returned in the second column of an [EXPLAIN QUERY PLAN] query. ** ** [[SQLITE_SCANSTAT_NCYCLE]]
                SQLITE_SCANSTAT_NCYCLE
                **
                The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1.
                +** set to -1. ** */ #define SQLITE_SCANSTAT_NLOOP 0 @@ -10511,8 +10440,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter. ** ** Parameter "idx" identifies the specific query element to retrieve statistics -** for. Query elements are numbered starting from zero. A value of -1 may -** retrieve statistics for the entire query. ^If idx is out of range +** for. Query elements are numbered starting from zero. A value of -1 may be +** to query for statistics regarding the entire query. ^If idx is out of range ** - less than -1 or greater than or equal to the total number of query ** elements used to implement the statement - a non-zero value is returned and ** the variable that pOut points to is unchanged. @@ -10555,7 +10484,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^If a write-transaction is open on [database connection] D when the -** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty +** [sqlite3_db_cacheflush(D)] interface invoked, any dirty ** pages in the pager-cache that are not currently in use are written out ** to disk. A dirty page may be in use if a database cursor created by an ** active SQL statement is reading from it, or if it is page 1 of a database @@ -10669,8 +10598,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*); ** triggers; and so forth. ** ** When the [sqlite3_blob_write()] API is used to update a blob column, -** the pre-update hook is invoked with SQLITE_DELETE, because -** the new values are not yet available. In this case, when a +** the pre-update hook is invoked with SQLITE_DELETE. This is because the +** in this case the new values are not available. In this case, when a ** callback made with op==SQLITE_DELETE is actually a write using the ** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns ** the index of the column being written. In other cases, where the @@ -10923,7 +10852,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** For an ordinary on-disk database file, the serialization is just a ** copy of the disk file. For an in-memory database or a "TEMP" database, ** the serialization is the same sequence of bytes which would be written -** to disk if that database were backed up to disk. +** to disk if that database where backed up to disk. ** ** The usual case is that sqlite3_serialize() copies the serialization of ** the database into memory obtained from [sqlite3_malloc64()] and returns @@ -10932,7 +10861,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations ** are made, and the sqlite3_serialize() function will return a pointer ** to the contiguous memory representation of the database that SQLite -** is currently using for that database, or NULL if no such contiguous +** is currently using for that database, or NULL if the no such contiguous ** memory representation of the database exists. A contiguous memory ** representation of the database will usually only exist if there has ** been a prior call to [sqlite3_deserialize(D,S,...)] with the same @@ -11003,7 +10932,7 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** -** It is not possible to deserialize into the TEMP database. 
If the +** It is not possible to deserialized into the TEMP database. If the ** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the ** function returns SQLITE_ERROR. ** @@ -11025,7 +10954,7 @@ SQLITE_API int sqlite3_deserialize( sqlite3 *db, /* The database connection */ const char *zSchema, /* Which DB to reopen with the deserialization */ unsigned char *pData, /* The serialized database content */ - sqlite3_int64 szDb, /* Number of bytes in the deserialization */ + sqlite3_int64 szDb, /* Number bytes in the deserialization */ sqlite3_int64 szBuf, /* Total size of buffer pData[] */ unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */ ); @@ -11033,7 +10962,7 @@ SQLITE_API int sqlite3_deserialize( /* ** CAPI3REF: Flags for sqlite3_deserialize() ** -** The following are allowed values for the 6th argument (the F argument) to +** The following are allowed values for 6th argument (the F argument) to ** the [sqlite3_deserialize(D,S,P,N,M,F)] interface. ** ** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization @@ -11558,10 +11487,9 @@ SQLITE_API void sqlite3session_table_filter( ** is inserted while a session object is enabled, then later deleted while ** the same session object is disabled, no INSERT record will appear in the ** changeset, even though the delete took place while the session was disabled. -** Or, if one field of a row is updated while a session is enabled, and -** then another field of the same row is updated while the session is disabled, -** the resulting changeset will contain an UPDATE change that updates both -** fields. +** Or, if one field of a row is updated while a session is disabled, and +** another field of the same row is updated while the session is enabled, the +** resulting changeset will contain an UPDATE change that updates both fields. */ SQLITE_API int sqlite3session_changeset( sqlite3_session *pSession, /* Session object */ @@ -11633,9 +11561,8 @@ SQLITE_API sqlite3_int64 sqlite3session_changeset_size(sqlite3_session *pSession ** database zFrom the contents of the two compatible tables would be ** identical. ** -** Unless the call to this function is a no-op as described above, it is an -** error if database zFrom does not exist or does not contain the required -** compatible table. +** It an error if database zFrom does not exist or does not contain the +** required compatible table. ** ** If the operation is successful, SQLITE_OK is returned. Otherwise, an SQLite ** error code. In this case, if argument pzErrMsg is not NULL, *pzErrMsg @@ -11770,7 +11697,7 @@ SQLITE_API int sqlite3changeset_start_v2( ** The following flags may passed via the 4th parameter to ** [sqlite3changeset_start_v2] and [sqlite3changeset_start_v2_strm]: ** -**
                SQLITE_CHANGESETSTART_INVERT
                +**
                SQLITE_CHANGESETAPPLY_INVERT
                ** Invert the changeset while iterating through it. This is equivalent to ** inverting a changeset using sqlite3changeset_invert() before applying it. ** It is an error to specify this flag with a patchset. @@ -12085,6 +12012,19 @@ SQLITE_API int sqlite3changeset_concat( void **ppOut /* OUT: Buffer containing output changeset */ ); + +/* +** CAPI3REF: Upgrade the Schema of a Changeset/Patchset +*/ +SQLITE_API int sqlite3changeset_upgrade( + sqlite3 *db, + const char *zDb, + int nIn, const void *pIn, /* Input changeset */ + int *pnOut, void **ppOut /* OUT: Inverse of input */ +); + + + /* ** CAPI3REF: Changegroup Handle ** diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go index 5a492766..76d84016 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go @@ -16,10 +16,53 @@ package sqlite3 #else #include #endif +#include + +static int +_sqlite3_user_authenticate(sqlite3* db, const char* zUsername, const char* aPW, int nPW) +{ + return sqlite3_user_authenticate(db, zUsername, aPW, nPW); +} + +static int +_sqlite3_user_add(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin) +{ + return sqlite3_user_add(db, zUsername, aPW, nPW, isAdmin); +} + +static int +_sqlite3_user_change(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin) +{ + return sqlite3_user_change(db, zUsername, aPW, nPW, isAdmin); +} + +static int +_sqlite3_user_delete(sqlite3* db, const char* zUsername) +{ + return sqlite3_user_delete(db, zUsername); +} + +static int +_sqlite3_auth_enabled(sqlite3* db) +{ + int exists = -1; + + sqlite3_stmt *stmt; + sqlite3_prepare_v2(db, "select count(type) from sqlite_master WHERE type='table' and name='sqlite_user';", -1, &stmt, NULL); + + while ( sqlite3_step(stmt) == SQLITE_ROW) { + exists = sqlite3_column_int(stmt, 0); + } + + sqlite3_finalize(stmt); + + return exists; +} */ import "C" import ( "errors" + "unsafe" ) const ( @@ -27,9 +70,8 @@ const ( ) var ( - ErrUnauthorized = errors.New("SQLITE_AUTH: Unauthorized") - ErrAdminRequired = errors.New("SQLITE_AUTH: Unauthorized; Admin Privileges Required") - errUserAuthNoLongerSupported = errors.New("sqlite3: the sqlite_userauth tag is no longer supported as the userauth extension is no longer supported by the SQLite authors, see https://github.com/mattn/go-sqlite3/issues/1341") + ErrUnauthorized = errors.New("SQLITE_AUTH: Unauthorized") + ErrAdminRequired = errors.New("SQLITE_AUTH: Unauthorized; Admin Privileges Required") ) // Authenticate will perform an authentication of the provided username @@ -46,7 +88,15 @@ var ( // If the SQLITE_USER table is not present in the database file, then // this interface is a harmless no-op returning SQLITE_OK. func (c *SQLiteConn) Authenticate(username, password string) error { - return errUserAuthNoLongerSupported + rv := c.authenticate(username, password) + switch rv { + case C.SQLITE_ERROR, C.SQLITE_AUTH: + return ErrUnauthorized + case C.SQLITE_OK: + return nil + default: + return c.lastError() + } } // authenticate provides the actual authentication to SQLite. 
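The restored Go wrappers in this file call straight into the user-authentication extension's C entry points. A sketch of the same calls from C; this only compiles against an amalgamation built with -DSQLITE_USER_AUTHENTICATION, and the users.db file name is hypothetical:

/* Requires an SQLite build with -DSQLITE_USER_AUTHENTICATION. */
#include <stdio.h>
#include <string.h>
#include <sqlite3.h>

int main(void){
  sqlite3 *db;
  int rc;
  if( sqlite3_open("users.db", &db)!=SQLITE_OK ) return 1;

  /* The first user added to a no-auth database becomes the admin and
  ** switches the file into authentication-required mode. */
  rc = sqlite3_user_add(db, "admin", "s3cret", (int)strlen("s3cret"), 1);
  if( rc==SQLITE_OK ){
    rc = sqlite3_user_authenticate(db, "admin", "s3cret",
                                   (int)strlen("s3cret"));
  }
  printf("auth rc=%d\n", rc);
  sqlite3_close(db);
  return 0;
}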
@@ -59,7 +109,17 @@ func (c *SQLiteConn) Authenticate(username, password string) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authenticate(username, password string) int { - return 1 + // Allocate C Variables + cuser := C.CString(username) + cpass := C.CString(password) + + // Free C Variables + defer func() { + C.free(unsafe.Pointer(cuser)) + C.free(unsafe.Pointer(cpass)) + }() + + return int(C._sqlite3_user_authenticate(c.db, cuser, cpass, C.int(len(password)))) } // AuthUserAdd can be used (by an admin user only) @@ -71,7 +131,20 @@ func (c *SQLiteConn) authenticate(username, password string) int { // for any ATTACH-ed databases. Any call to AuthUserAdd by a // non-admin user results in an error. func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error { - return errUserAuthNoLongerSupported + isAdmin := 0 + if admin { + isAdmin = 1 + } + + rv := c.authUserAdd(username, password, isAdmin) + switch rv { + case C.SQLITE_ERROR, C.SQLITE_AUTH: + return ErrAdminRequired + case C.SQLITE_OK: + return nil + default: + return c.lastError() + } } // authUserAdd enables the User Authentication if not enabled. @@ -89,7 +162,17 @@ func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserAdd(username, password string, admin int) int { - return 1 + // Allocate C Variables + cuser := C.CString(username) + cpass := C.CString(password) + + // Free C Variables + defer func() { + C.free(unsafe.Pointer(cuser)) + C.free(unsafe.Pointer(cpass)) + }() + + return int(C._sqlite3_user_add(c.db, cuser, cpass, C.int(len(password)), C.int(admin))) } // AuthUserChange can be used to change a users @@ -98,7 +181,20 @@ func (c *SQLiteConn) authUserAdd(username, password string, admin int) int { // credentials or admin privilege setting. No user may change their own // admin privilege setting. func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error { - return errUserAuthNoLongerSupported + isAdmin := 0 + if admin { + isAdmin = 1 + } + + rv := c.authUserChange(username, password, isAdmin) + switch rv { + case C.SQLITE_ERROR, C.SQLITE_AUTH: + return ErrAdminRequired + case C.SQLITE_OK: + return nil + default: + return c.lastError() + } } // authUserChange allows to modify a user. @@ -119,7 +215,17 @@ func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserChange(username, password string, admin int) int { - return 1 + // Allocate C Variables + cuser := C.CString(username) + cpass := C.CString(password) + + // Free C Variables + defer func() { + C.free(unsafe.Pointer(cuser)) + C.free(unsafe.Pointer(cpass)) + }() + + return int(C._sqlite3_user_change(c.db, cuser, cpass, C.int(len(password)), C.int(admin))) } // AuthUserDelete can be used (by an admin user only) @@ -128,7 +234,15 @@ func (c *SQLiteConn) authUserChange(username, password string, admin int) int { // the database cannot be converted into a no-authentication-required // database. func (c *SQLiteConn) AuthUserDelete(username string) error { - return errUserAuthNoLongerSupported + rv := c.authUserDelete(username) + switch rv { + case C.SQLITE_ERROR, C.SQLITE_AUTH: + return ErrAdminRequired + case C.SQLITE_OK: + return nil + default: + return c.lastError() + } } // authUserDelete can be used to delete a user. 
@@ -144,12 +258,25 @@ func (c *SQLiteConn) AuthUserDelete(username string) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserDelete(username string) int { - return 1 + // Allocate C Variables + cuser := C.CString(username) + + // Free C Variables + defer func() { + C.free(unsafe.Pointer(cuser)) + }() + + return int(C._sqlite3_user_delete(c.db, cuser)) } // AuthEnabled checks if the database is protected by user authentication func (c *SQLiteConn) AuthEnabled() (exists bool) { - return false + rv := c.authEnabled() + if rv == 1 { + exists = true + } + + return } // authEnabled perform the actual check for user authentication. @@ -162,7 +289,7 @@ func (c *SQLiteConn) AuthEnabled() (exists bool) { // 0 - Disabled // 1 - Enabled func (c *SQLiteConn) authEnabled() int { - return 0 + return int(C._sqlite3_auth_enabled(c.db)) } // EOF diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h index 3a5e0a4e..935437bb 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h @@ -371,8 +371,6 @@ struct sqlite3_api_routines { /* Version 3.44.0 and later */ void *(*get_clientdata)(sqlite3*,const char*); int (*set_clientdata)(sqlite3*, const char*, void*, void(*)(void*)); - /* Version 3.50.0 and later */ - int (*setlk_timeout)(sqlite3*,int,int); }; /* @@ -706,8 +704,6 @@ typedef int (*sqlite3_loadext_entry)( /* Version 3.44.0 and later */ #define sqlite3_get_clientdata sqlite3_api->get_clientdata #define sqlite3_set_clientdata sqlite3_api->set_clientdata -/* Version 3.50.0 and later */ -#define sqlite3_setlk_timeout sqlite3_api->setlk_timeout #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go index 7bac0da3..8b016355 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go @@ -453,7 +453,7 @@ func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { } group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) } - if len(group) > 0 && (len(group) != 1 || group[0].Tag != 'e') { + if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { groups = append(groups, group) } return groups @@ -568,7 +568,7 @@ func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { buf := bufio.NewWriter(writer) defer buf.Flush() wf := func(format string, args ...interface{}) error { - _, err := fmt.Fprintf(buf, format, args...) 
+ _, err := buf.WriteString(fmt.Sprintf(format, args...)) return err } ws := func(s string) error { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go index 76e59f12..592eec3e 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/metric.go @@ -186,31 +186,21 @@ func (m *withExemplarsMetric) Write(pb *dto.Metric) error { case pb.Counter != nil: pb.Counter.Exemplar = m.exemplars[len(m.exemplars)-1] case pb.Histogram != nil: - h := pb.Histogram for _, e := range m.exemplars { - if (h.GetZeroThreshold() != 0 || h.GetZeroCount() != 0 || - len(h.PositiveSpan) != 0 || len(h.NegativeSpan) != 0) && - e.GetTimestamp() != nil { - h.Exemplars = append(h.Exemplars, e) - if len(h.Bucket) == 0 { - // Don't proceed to classic buckets if there are none. - continue - } - } - // h.Bucket are sorted by UpperBound. - i := sort.Search(len(h.Bucket), func(i int) bool { - return h.Bucket[i].GetUpperBound() >= e.GetValue() + // pb.Histogram.Bucket are sorted by UpperBound. + i := sort.Search(len(pb.Histogram.Bucket), func(i int) bool { + return pb.Histogram.Bucket[i].GetUpperBound() >= e.GetValue() }) - if i < len(h.Bucket) { - h.Bucket[i].Exemplar = e + if i < len(pb.Histogram.Bucket) { + pb.Histogram.Bucket[i].Exemplar = e } else { // The +Inf bucket should be explicitly added if there is an exemplar for it, similar to non-const histogram logic in https://github.com/prometheus/client_golang/blob/main/prometheus/histogram.go#L357-L365. b := &dto.Bucket{ - CumulativeCount: proto.Uint64(h.GetSampleCount()), + CumulativeCount: proto.Uint64(pb.Histogram.GetSampleCount()), UpperBound: proto.Float64(math.Inf(1)), Exemplar: e, } - h.Bucket = append(h.Bucket, b) + pb.Histogram.Bucket = append(pb.Histogram.Bucket, b) } } default: @@ -237,7 +227,6 @@ type Exemplar struct { // Only last applicable exemplar is injected from the list. // For example for Counter it means last exemplar is injected. // For Histogram, it means last applicable exemplar for each bucket is injected. -// For a Native Histogram, all valid exemplars are injected. // // NewMetricWithExemplars works best with MustNewConstMetric and // MustNewConstHistogram, see example. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go index b32c95fa..0a61b984 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go @@ -25,9 +25,9 @@ import ( "golang.org/x/sys/unix" ) -// errNotImplemented is returned by stub functions that replace cgo functions, when cgo +// notImplementedErr is returned by stub functions that replace cgo functions, when cgo // isn't available. -var errNotImplemented = errors.New("not implemented") +var notImplementedErr = errors.New("not implemented") type memoryInfo struct { vsize uint64 // Virtual memory size in bytes @@ -101,7 +101,7 @@ func (c *processCollector) processCollect(ch chan<- Metric) { if memInfo, err := getMemory(); err == nil { ch <- MustNewConstMetric(c.rss, GaugeValue, float64(memInfo.rss)) ch <- MustNewConstMetric(c.vsize, GaugeValue, float64(memInfo.vsize)) - } else if !errors.Is(err, errNotImplemented) { + } else if !errors.Is(err, notImplementedErr) { // Don't report an error when support is not compiled in. 
c.reportError(ch, c.rss, err) c.reportError(ch, c.vsize, err) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go index 37886512..8ddb0995 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go @@ -16,7 +16,7 @@ package prometheus func getMemory() (*memoryInfo, error) { - return nil, errNotImplemented + return nil, notImplementedErr } // describe returns all descriptions of the collector for Darwin. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go index 8074f70f..9f4b130b 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go @@ -66,11 +66,11 @@ func (c *processCollector) processCollect(ch chan<- Metric) { if netstat, err := p.Netstat(); err == nil { var inOctets, outOctets float64 - if netstat.InOctets != nil { - inOctets = *netstat.InOctets + if netstat.IpExt.InOctets != nil { + inOctets = *netstat.IpExt.InOctets } - if netstat.OutOctets != nil { - outOctets = *netstat.OutOctets + if netstat.IpExt.OutOctets != nil { + outOctets = *netstat.IpExt.OutOctets } ch <- MustNewConstMetric(c.inBytes, CounterValue, inOctets) ch <- MustNewConstMetric(c.outBytes, CounterValue, outOctets) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go index 9332b024..356edb78 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go @@ -392,7 +392,7 @@ func isLabelCurried(c prometheus.Collector, label string) bool { func labels(code, method bool, reqMethod string, status int, extraMethods ...string) prometheus.Labels { labels := prometheus.Labels{} - if !code && !method { + if !(code || method) { return labels } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go index 487b4665..2c808eec 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/vec.go @@ -79,7 +79,7 @@ func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { return false } - return m.deleteByHashWithLabelValues(h, lvs, m.curry) + return m.metricMap.deleteByHashWithLabelValues(h, lvs, m.curry) } // Delete deletes the metric where the variable labels are the same as those @@ -101,7 +101,7 @@ func (m *MetricVec) Delete(labels Labels) bool { return false } - return m.deleteByHashWithLabels(h, labels, m.curry) + return m.metricMap.deleteByHashWithLabels(h, labels, m.curry) } // DeletePartialMatch deletes all metrics where the variable labels contain all of those @@ -114,7 +114,7 @@ func (m *MetricVec) DeletePartialMatch(labels Labels) int { labels, closer := constrainLabels(m.desc, labels) defer closer() - return m.deleteByLabels(labels, m.curry) + return m.metricMap.deleteByLabels(labels, m.curry) } // Without explicit forwarding of Describe, Collect, Reset, those 
methods won't @@ -216,7 +216,7 @@ func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { return nil, err } - return m.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil + return m.metricMap.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil } // GetMetricWith returns the Metric for the given Labels map (the label names @@ -244,7 +244,7 @@ func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { return nil, err } - return m.getOrCreateMetricWithLabels(h, labels, m.curry), nil + return m.metricMap.getOrCreateMetricWithLabels(h, labels, m.curry), nil } func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go index 2ed12850..25da157f 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go @@ -63,7 +63,7 @@ func WrapRegistererWith(labels Labels, reg Registerer) Registerer { // metric names that are standardized across applications, as that would break // horizontal monitoring, for example the metrics provided by the Go collector // (see NewGoCollector) and the process collector (see NewProcessCollector). (In -// fact, those metrics are already prefixed with "go_" or "process_", +// fact, those metrics are already prefixed with “go_” or “process_”, // respectively.) // // Conflicts between Collectors registered through the original Registerer with @@ -78,40 +78,6 @@ func WrapRegistererWithPrefix(prefix string, reg Registerer) Registerer { } } -// WrapCollectorWith returns a Collector wrapping the provided Collector. The -// wrapped Collector will add the provided Labels to all Metrics it collects (as -// ConstLabels). The Metrics collected by the unmodified Collector must not -// duplicate any of those labels. -// -// WrapCollectorWith can be useful to work with multiple instances of a third -// party library that does not expose enough flexibility on the lifecycle of its -// registered metrics. -// For example, let's say you have a foo.New(reg Registerer) constructor that -// registers metrics but never unregisters them, and you want to create multiple -// instances of foo.Foo with different labels. -// The way to achieve that, is to create a new Registry, pass it to foo.New, -// then use WrapCollectorWith to wrap that Registry with the desired labels and -// register that as a collector in your main Registry. -// Then you can un-register the wrapped collector effectively un-registering the -// metrics registered by foo.New. -func WrapCollectorWith(labels Labels, c Collector) Collector { - return &wrappingCollector{ - wrappedCollector: c, - labels: labels, - } -} - -// WrapCollectorWithPrefix returns a Collector wrapping the provided Collector. The -// wrapped Collector will add the provided prefix to the name of all Metrics it collects. -// -// See the documentation of WrapCollectorWith for more details on the use case. 
-func WrapCollectorWithPrefix(prefix string, c Collector) Collector { - return &wrappingCollector{ - wrappedCollector: c, - prefix: prefix, - } -} - type wrappingRegisterer struct { wrappedRegisterer Registerer prefix string diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index 0ed55c2b..4de21512 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -33,7 +33,7 @@ GOHOSTOS ?= $(shell $(GO) env GOHOSTOS) GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH) GO_VERSION ?= $(shell $(GO) version) -GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) +GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.') PROMU := $(FIRST_GOPATH)/bin/promu @@ -61,7 +61,8 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_ SKIP_GOLANGCI_LINT := GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v2.0.2 +GOLANGCI_LINT_VERSION ?= v2.1.5 +GOLANGCI_FMT_OPTS ?= # golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) @@ -156,9 +157,13 @@ $(GOTEST_DIR): @mkdir -p $@ .PHONY: common-format -common-format: +common-format: $(GOLANGCI_LINT) @echo ">> formatting code" $(GO) fmt $(pkgs) +ifdef GOLANGCI_LINT + @echo ">> formatting code with golangci-lint" + $(GOLANGCI_LINT) fmt $(GOLANGCI_FMT_OPTS) +endif .PHONY: common-vet common-vet: @@ -248,8 +253,8 @@ $(PROMU): cp $(PROMU_TMP)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(FIRST_GOPATH)/bin/promu rm -r $(PROMU_TMP) -.PHONY: proto -proto: +.PHONY: common-proto +common-proto: @echo ">> generating code from proto files" @./scripts/genproto.sh diff --git a/vendor/github.com/prometheus/procfs/mdstat.go b/vendor/github.com/prometheus/procfs/mdstat.go index 67a9d2b4..1fd4381b 100644 --- a/vendor/github.com/prometheus/procfs/mdstat.go +++ b/vendor/github.com/prometheus/procfs/mdstat.go @@ -123,13 +123,16 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) { finish := float64(0) pct := float64(0) recovering := strings.Contains(lines[syncLineIdx], "recovery") + reshaping := strings.Contains(lines[syncLineIdx], "reshape") resyncing := strings.Contains(lines[syncLineIdx], "resync") checking := strings.Contains(lines[syncLineIdx], "check") // Append recovery and resyncing state info.
- if recovering || resyncing || checking { + if recovering || resyncing || checking || reshaping { if recovering { state = "recovering" + } else if reshaping { + state = "reshaping" } else if checking { state = "checking" } else { diff --git a/vendor/github.com/prometheus/procfs/meminfo.go b/vendor/github.com/prometheus/procfs/meminfo.go index 4b2c4050..937e1f96 100644 --- a/vendor/github.com/prometheus/procfs/meminfo.go +++ b/vendor/github.com/prometheus/procfs/meminfo.go @@ -66,6 +66,10 @@ type Meminfo struct { // Memory which has been evicted from RAM, and is temporarily // on the disk SwapFree *uint64 + // Memory consumed by the zswap backend (compressed size) + Zswap *uint64 + // Amount of anonymous memory stored in zswap (original size) + Zswapped *uint64 // Memory which is waiting to get written back to the disk Dirty *uint64 // Memory which is actively being written back to the disk @@ -85,6 +89,8 @@ type Meminfo struct { // amount of memory dedicated to the lowest level of page // tables. PageTables *uint64 + // secondary page tables. + SecPageTables *uint64 // NFS pages sent to the server, but not yet committed to // stable storage NFSUnstable *uint64 @@ -129,15 +135,18 @@ type Meminfo struct { Percpu *uint64 HardwareCorrupted *uint64 AnonHugePages *uint64 + FileHugePages *uint64 ShmemHugePages *uint64 ShmemPmdMapped *uint64 CmaTotal *uint64 CmaFree *uint64 + Unaccepted *uint64 HugePagesTotal *uint64 HugePagesFree *uint64 HugePagesRsvd *uint64 HugePagesSurp *uint64 Hugepagesize *uint64 + Hugetlb *uint64 DirectMap4k *uint64 DirectMap2M *uint64 DirectMap1G *uint64 @@ -161,6 +170,8 @@ type Meminfo struct { MlockedBytes *uint64 SwapTotalBytes *uint64 SwapFreeBytes *uint64 + ZswapBytes *uint64 + ZswappedBytes *uint64 DirtyBytes *uint64 WritebackBytes *uint64 AnonPagesBytes *uint64 @@ -171,6 +182,7 @@ type Meminfo struct { SUnreclaimBytes *uint64 KernelStackBytes *uint64 PageTablesBytes *uint64 + SecPageTablesBytes *uint64 NFSUnstableBytes *uint64 BounceBytes *uint64 WritebackTmpBytes *uint64 @@ -182,11 +194,14 @@ type Meminfo struct { PercpuBytes *uint64 HardwareCorruptedBytes *uint64 AnonHugePagesBytes *uint64 + FileHugePagesBytes *uint64 ShmemHugePagesBytes *uint64 ShmemPmdMappedBytes *uint64 CmaTotalBytes *uint64 CmaFreeBytes *uint64 + UnacceptedBytes *uint64 HugepagesizeBytes *uint64 + HugetlbBytes *uint64 DirectMap4kBytes *uint64 DirectMap2MBytes *uint64 DirectMap1GBytes *uint64 @@ -287,6 +302,12 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "SwapFree:": m.SwapFree = &val m.SwapFreeBytes = &valBytes + case "Zswap:": + m.Zswap = &val + m.ZswapBytes = &valBytes + case "Zswapped:": + m.Zswapped = &val + m.ZswapBytes = &valBytes case "Dirty:": m.Dirty = &val m.DirtyBytes = &valBytes @@ -317,6 +338,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "PageTables:": m.PageTables = &val m.PageTablesBytes = &valBytes + case "SecPageTables:": + m.SecPageTables = &val + m.SecPageTablesBytes = &valBytes case "NFS_Unstable:": m.NFSUnstable = &val m.NFSUnstableBytes = &valBytes @@ -350,6 +374,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "AnonHugePages:": m.AnonHugePages = &val m.AnonHugePagesBytes = &valBytes + case "FileHugePages:": + m.FileHugePages = &val + m.FileHugePagesBytes = &valBytes case "ShmemHugePages:": m.ShmemHugePages = &val m.ShmemHugePagesBytes = &valBytes @@ -362,6 +389,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "CmaFree:": m.CmaFree = &val m.CmaFreeBytes = &valBytes + case "Unaccepted:": + m.Unaccepted = &val + 
m.UnacceptedBytes = &valBytes case "HugePages_Total:": m.HugePagesTotal = &val case "HugePages_Free:": @@ -373,6 +403,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "Hugepagesize:": m.Hugepagesize = &val m.HugepagesizeBytes = &valBytes + case "Hugetlb:": + m.Hugetlb = &val + m.HugetlbBytes = &valBytes case "DirectMap4k:": m.DirectMap4k = &val m.DirectMap4kBytes = &valBytes diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go index 06a8d931..3328556b 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ b/vendor/github.com/prometheus/procfs/proc_stat.go @@ -101,6 +101,12 @@ type ProcStat struct { RSS int // Soft limit in bytes on the rss of the process. RSSLimit uint64 + // The address above which program text can run. + StartCode uint64 + // The address below which program text can run. + EndCode uint64 + // The address of the start (i.e., bottom) of the stack. + StartStack uint64 // CPU number last executed on. Processor uint // Real-time scheduling priority, a number in the range 1 to 99 for processes @@ -177,9 +183,9 @@ func (p Proc) Stat() (ProcStat, error) { &s.VSize, &s.RSS, &s.RSSLimit, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, + &s.StartCode, + &s.EndCode, + &s.StartStack, &ignoreUint64, &ignoreUint64, &ignoreUint64, diff --git a/vendor/github.com/prometheus/procfs/proc_statm.go b/vendor/github.com/prometheus/procfs/proc_statm.go new file mode 100644 index 00000000..ed579842 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/proc_statm.go @@ -0,0 +1,116 @@ +// Copyright 2025 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "os" + "strconv" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// - https://man7.org/linux/man-pages/man5/proc_pid_statm.5.html + +// ProcStatm Provides memory usage information for a process, measured in memory pages. +// Read from /proc/[pid]/statm. +type ProcStatm struct { + // The process ID. + PID int + // total program size (same as VmSize in status) + Size uint64 + // resident set size (same as VmRSS in status) + Resident uint64 + // number of resident shared pages (i.e., backed by a file) + Shared uint64 + // text (code) + Text uint64 + // library (unused since Linux 2.6; always 0) + Lib uint64 + // data + stack + Data uint64 + // dirty pages (unused since Linux 2.6; always 0) + Dt uint64 +} + +// NewStatm returns the current status information of the process. +// Deprecated: Use p.Statm() instead. +func (p Proc) NewStatm() (ProcStatm, error) { + return p.Statm() +} + +// Statm returns the current memory usage information of the process. 
+func (p Proc) Statm() (ProcStatm, error) { + data, err := util.ReadFileNoStat(p.path("statm")) + if err != nil { + return ProcStatm{}, err + } + + statmSlice, err := parseStatm(data) + if err != nil { + return ProcStatm{}, err + } + + procStatm := ProcStatm{ + PID: p.PID, + Size: statmSlice[0], + Resident: statmSlice[1], + Shared: statmSlice[2], + Text: statmSlice[3], + Lib: statmSlice[4], + Data: statmSlice[5], + Dt: statmSlice[6], + } + + return procStatm, nil +} + +// parseStatm return /proc/[pid]/statm data to uint64 slice. +func parseStatm(data []byte) ([]uint64, error) { + var statmSlice []uint64 + statmItems := strings.Fields(string(data)) + for i := 0; i < len(statmItems); i++ { + statmItem, err := strconv.ParseUint(statmItems[i], 10, 64) + if err != nil { + return nil, err + } + statmSlice = append(statmSlice, statmItem) + } + return statmSlice, nil +} + +// SizeBytes returns the process of total program size in bytes. +func (s ProcStatm) SizeBytes() uint64 { + return s.Size * uint64(os.Getpagesize()) +} + +// ResidentBytes returns the process of resident set size in bytes. +func (s ProcStatm) ResidentBytes() uint64 { + return s.Resident * uint64(os.Getpagesize()) +} + +// SHRBytes returns the process of share memory size in bytes. +func (s ProcStatm) SHRBytes() uint64 { + return s.Shared * uint64(os.Getpagesize()) +} + +// TextBytes returns the process of text (code) size in bytes. +func (s ProcStatm) TextBytes() uint64 { + return s.Text * uint64(os.Getpagesize()) +} + +// DataBytes returns the process of data + stack size in bytes. +func (s ProcStatm) DataBytes() uint64 { + return s.Data * uint64(os.Getpagesize()) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/vendor/github.com/stretchr/testify/assert/assertion_compare.go index ffb24e8e..7e19eba0 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_compare.go @@ -390,8 +390,7 @@ func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface if h, ok := t.(tHelper); ok { h.Helper() } - failMessage := fmt.Sprintf("\"%v\" is not greater than \"%v\"", e1, e2) - return compareTwoValues(t, e1, e2, []compareResult{compareGreater}, failMessage, msgAndArgs...) + return compareTwoValues(t, e1, e2, []compareResult{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...) } // GreaterOrEqual asserts that the first element is greater than or equal to the second @@ -404,8 +403,7 @@ func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...in if h, ok := t.(tHelper); ok { h.Helper() } - failMessage := fmt.Sprintf("\"%v\" is not greater than or equal to \"%v\"", e1, e2) - return compareTwoValues(t, e1, e2, []compareResult{compareGreater, compareEqual}, failMessage, msgAndArgs...) + return compareTwoValues(t, e1, e2, []compareResult{compareGreater, compareEqual}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...) } // Less asserts that the first element is less than the second @@ -417,8 +415,7 @@ func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) if h, ok := t.(tHelper); ok { h.Helper() } - failMessage := fmt.Sprintf("\"%v\" is not less than \"%v\"", e1, e2) - return compareTwoValues(t, e1, e2, []compareResult{compareLess}, failMessage, msgAndArgs...) + return compareTwoValues(t, e1, e2, []compareResult{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...) 
} // LessOrEqual asserts that the first element is less than or equal to the second @@ -431,8 +428,7 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter if h, ok := t.(tHelper); ok { h.Helper() } - failMessage := fmt.Sprintf("\"%v\" is not less than or equal to \"%v\"", e1, e2) - return compareTwoValues(t, e1, e2, []compareResult{compareLess, compareEqual}, failMessage, msgAndArgs...) + return compareTwoValues(t, e1, e2, []compareResult{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...) } // Positive asserts that the specified element is positive @@ -444,8 +440,7 @@ func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { h.Helper() } zero := reflect.Zero(reflect.TypeOf(e)) - failMessage := fmt.Sprintf("\"%v\" is not positive", e) - return compareTwoValues(t, e, zero.Interface(), []compareResult{compareGreater}, failMessage, msgAndArgs...) + return compareTwoValues(t, e, zero.Interface(), []compareResult{compareGreater}, "\"%v\" is not positive", msgAndArgs...) } // Negative asserts that the specified element is negative @@ -457,8 +452,7 @@ func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { h.Helper() } zero := reflect.Zero(reflect.TypeOf(e)) - failMessage := fmt.Sprintf("\"%v\" is not negative", e) - return compareTwoValues(t, e, zero.Interface(), []compareResult{compareLess}, failMessage, msgAndArgs...) + return compareTwoValues(t, e, zero.Interface(), []compareResult{compareLess}, "\"%v\" is not negative", msgAndArgs...) } func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []compareResult, failMessage string, msgAndArgs ...interface{}) bool { @@ -474,11 +468,11 @@ func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedCompare compareResult, isComparable := compare(e1, e2, e1Kind) if !isComparable { - return Fail(t, fmt.Sprintf(`Can not compare type "%T"`, e1), msgAndArgs...) + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) } if !containsValue(allowedComparesResults, compareResult) { - return Fail(t, failMessage, msgAndArgs...) + return Fail(t, fmt.Sprintf(failMessage, e1, e2), msgAndArgs...) } return true diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index c592f6ad..19063416 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -50,19 +50,10 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string return ElementsMatch(t, listA, listB, append([]interface{}{msg}, args...)...) } -// Emptyf asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. 
// // assert.Emptyf(t, obj, "error message %s", "formatted") -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -126,8 +117,10 @@ func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg stri // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// assert.Errorf(t, err, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -445,19 +438,7 @@ func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interf return IsNonIncreasing(t, object, append([]interface{}{msg}, args...)...) } -// IsNotTypef asserts that the specified objects are not of the same type. -// -// assert.IsNotTypef(t, &NotMyStruct{}, &MyStruct{}, "error message %s", "formatted") -func IsNotTypef(t TestingT, theType interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsNotType(t, theType, object, append([]interface{}{msg}, args...)...) -} - // IsTypef asserts that the specified objects are of the same type. -// -// assert.IsTypef(t, &MyStruct{}, &MyStruct{}, "error message %s", "formatted") func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -604,7 +585,8 @@ func NotElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg str return NotElementsMatch(t, listA, listB, append([]interface{}{msg}, args...)...) } -// NotEmptyf asserts that the specified object is NOT [Empty]. +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if assert.NotEmptyf(t, obj, "error message %s", "formatted") { // assert.Equal(t, "two", obj[1]) @@ -711,15 +693,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, return NotSame(t, expected, actual, append([]interface{}{msg}, args...)...) } -// NotSubsetf asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubsetf asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted") // assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -// assert.NotSubsetf(t, [1, 3, 4], {1: "one", 2: "two"}, "error message %s", "formatted") -// assert.NotSubsetf(t, {"x": 1, "y": 2}, ["z"], "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -803,15 +782,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg return Same(t, expected, actual, append([]interface{}{msg}, args...)...) 
} -// Subsetf asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subsetf asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. // // assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted") // assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -// assert.Subsetf(t, [1, 2, 3], {1: "one", 2: "two"}, "error message %s", "formatted") -// assert.Subsetf(t, {"x": 1, "y": 2}, ["x"], "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index 58db9284..21629087 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -92,19 +92,10 @@ func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg st return ElementsMatchf(a.t, listA, listB, msg, args...) } -// Empty asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. // // a.Empty(obj) -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -112,19 +103,10 @@ func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { return Empty(a.t, object, msgAndArgs...) } -// Emptyf asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. // // a.Emptyf(obj, "error message %s", "formatted") -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -242,8 +224,10 @@ func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// a.Error(err) +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -313,8 +297,10 @@ func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...inter // Errorf asserts that a function returned an error (i.e. not `nil`). 
// -// actualObj, err := SomeFunction() -// a.Errorf(err, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -882,29 +868,7 @@ func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...in return IsNonIncreasingf(a.t, object, msg, args...) } -// IsNotType asserts that the specified objects are not of the same type. -// -// a.IsNotType(&NotMyStruct{}, &MyStruct{}) -func (a *Assertions) IsNotType(theType interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNotType(a.t, theType, object, msgAndArgs...) -} - -// IsNotTypef asserts that the specified objects are not of the same type. -// -// a.IsNotTypef(&NotMyStruct{}, &MyStruct{}, "error message %s", "formatted") -func (a *Assertions) IsNotTypef(theType interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNotTypef(a.t, theType, object, msg, args...) -} - // IsType asserts that the specified objects are of the same type. -// -// a.IsType(&MyStruct{}, &MyStruct{}) func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -913,8 +877,6 @@ func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAnd } // IsTypef asserts that the specified objects are of the same type. -// -// a.IsTypef(&MyStruct{}, &MyStruct{}, "error message %s", "formatted") func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1200,7 +1162,8 @@ func (a *Assertions) NotElementsMatchf(listA interface{}, listB interface{}, msg return NotElementsMatchf(a.t, listA, listB, msg, args...) } -// NotEmpty asserts that the specified object is NOT [Empty]. +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if a.NotEmpty(obj) { // assert.Equal(t, "two", obj[1]) @@ -1212,7 +1175,8 @@ func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) boo return NotEmpty(a.t, object, msgAndArgs...) } -// NotEmptyf asserts that the specified object is NOT [Empty]. +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if a.NotEmptyf(obj, "error message %s", "formatted") { // assert.Equal(t, "two", obj[1]) @@ -1414,15 +1378,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri return NotSamef(a.t, expected, actual, msg, args...) } -// NotSubset asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubset asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. 
// // a.NotSubset([1, 3, 4], [1, 2]) // a.NotSubset({"x": 1, "y": 2}, {"z": 3}) -// a.NotSubset([1, 3, 4], {1: "one", 2: "two"}) -// a.NotSubset({"x": 1, "y": 2}, ["z"]) func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1430,15 +1391,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs return NotSubset(a.t, list, subset, msgAndArgs...) } -// NotSubsetf asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubsetf asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted") // a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -// a.NotSubsetf([1, 3, 4], {1: "one", 2: "two"}, "error message %s", "formatted") -// a.NotSubsetf({"x": 1, "y": 2}, ["z"], "error message %s", "formatted") func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1598,15 +1556,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, return Samef(a.t, expected, actual, msg, args...) } -// Subset asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subset asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. // // a.Subset([1, 2, 3], [1, 2]) // a.Subset({"x": 1, "y": 2}, {"x": 1}) -// a.Subset([1, 2, 3], {1: "one", 2: "two"}) -// a.Subset({"x": 1, "y": 2}, ["x"]) func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1614,15 +1568,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ... return Subset(a.t, list, subset, msgAndArgs...) } -// Subsetf asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subsetf asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. 
// // a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted") // a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -// a.Subsetf([1, 2, 3], {1: "one", 2: "two"}, "error message %s", "formatted") -// a.Subsetf({"x": 1, "y": 2}, ["x"], "error message %s", "formatted") func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go index 2fdf80fd..1d2f7182 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_order.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -33,7 +33,7 @@ func isOrdered(t TestingT, object interface{}, allowedComparesResults []compareR compareResult, isComparable := compare(prevValueInterface, valueInterface, firstValueKind) if !isComparable { - return Fail(t, fmt.Sprintf(`Can not compare type "%T" and "%T"`, value, prevValue), msgAndArgs...) + return Fail(t, fmt.Sprintf("Can not compare type \"%s\" and \"%s\"", reflect.TypeOf(value), reflect.TypeOf(prevValue)), msgAndArgs...) } if !containsValue(allowedComparesResults, compareResult) { diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index de8de0cb..4e91332b 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -210,77 +210,59 @@ the problem actually occurred in calling code.*/ // of each stack frame leading from the current test to the assert call that // failed. func CallerInfo() []string { + var pc uintptr + var ok bool var file string var line int var name string - const stackFrameBufferSize = 10 - pcs := make([]uintptr, stackFrameBufferSize) - callers := []string{} - offset := 1 - - for { - n := runtime.Callers(offset, pcs) - - if n == 0 { + for i := 0; ; i++ { + pc, file, line, ok = runtime.Caller(i) + if !ok { + // The breaks below failed to terminate the loop, and we ran off the + // end of the call stack. break } - frames := runtime.CallersFrames(pcs[:n]) + // This is a huge edge case, but it will panic if this is the case, see #180 + if file == "" { + break + } - for { - frame, more := frames.Next() - pc = frame.PC - file = frame.File - line = frame.Line + f := runtime.FuncForPC(pc) + if f == nil { + break + } + name = f.Name() - // This is a huge edge case, but it will panic if this is the case, see #180 - if file == "" { - break - } + // testing.tRunner is the standard library function that calls + // tests. Subtests are called directly by tRunner, without going through + // the Test/Benchmark/Example function that contains the t.Run calls, so + // with subtests we should break when we hit tRunner, without adding it + // to the list of callers. + if name == "testing.tRunner" { + break + } - f := runtime.FuncForPC(pc) - if f == nil { - break - } - name = f.Name() - - // testing.tRunner is the standard library function that calls - // tests. Subtests are called directly by tRunner, without going through - // the Test/Benchmark/Example function that contains the t.Run calls, so - // with subtests we should break when we hit tRunner, without adding it - // to the list of callers. 
- if name == "testing.tRunner" { - break - } - - parts := strings.Split(file, "/") - if len(parts) > 1 { - filename := parts[len(parts)-1] - dir := parts[len(parts)-2] - if (dir != "assert" && dir != "mock" && dir != "require") || filename == "mock_test.go" { - callers = append(callers, fmt.Sprintf("%s:%d", file, line)) - } - } - - // Drop the package - dotPos := strings.LastIndexByte(name, '.') - name = name[dotPos+1:] - if isTest(name, "Test") || - isTest(name, "Benchmark") || - isTest(name, "Example") { - break - } - - if !more { - break + parts := strings.Split(file, "/") + if len(parts) > 1 { + filename := parts[len(parts)-1] + dir := parts[len(parts)-2] + if (dir != "assert" && dir != "mock" && dir != "require") || filename == "mock_test.go" { + callers = append(callers, fmt.Sprintf("%s:%d", file, line)) } } - // Next batch - offset += cap(pcs) + // Drop the package + segments := strings.Split(name, ".") + name = segments[len(segments)-1] + if isTest(name, "Test") || + isTest(name, "Benchmark") || + isTest(name, "Example") { + break + } } return callers @@ -455,34 +437,17 @@ func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, return true } -func isType(expectedType, object interface{}) bool { - return ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) -} - // IsType asserts that the specified objects are of the same type. -// -// assert.IsType(t, &MyStruct{}, &MyStruct{}) -func IsType(t TestingT, expectedType, object interface{}, msgAndArgs ...interface{}) bool { - if isType(expectedType, object) { - return true - } +func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() } - return Fail(t, fmt.Sprintf("Object expected to be of type %T, but was %T", expectedType, object), msgAndArgs...) -} -// IsNotType asserts that the specified objects are not of the same type. -// -// assert.IsNotType(t, &NotMyStruct{}, &MyStruct{}) -func IsNotType(t TestingT, theType, object interface{}, msgAndArgs ...interface{}) bool { - if !isType(theType, object) { - return true + if !ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) { + return Fail(t, fmt.Sprintf("Object expected to be of type %v, but was %v", reflect.TypeOf(expectedType), reflect.TypeOf(object)), msgAndArgs...) } - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, fmt.Sprintf("Object type expected to be different than %T", theType), msgAndArgs...) + + return true } // Equal asserts that two objects are equal. @@ -510,6 +475,7 @@ func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) } return true + } // validateEqualArgs checks whether provided arguments can be safely used in the @@ -544,9 +510,8 @@ func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) b if !same { // both are pointers but not the same type & pointing to the same address return Fail(t, fmt.Sprintf("Not same: \n"+ - "expected: %p %#[1]v\n"+ - "actual : %p %#[2]v", - expected, actual), msgAndArgs...) + "expected: %p %#v\n"+ + "actual : %p %#v", expected, expected, actual, actual), msgAndArgs...) 
} return true @@ -565,14 +530,14 @@ func NotSame(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} same, ok := samePointers(expected, actual) if !ok { - // fails when the arguments are not pointers + //fails when the arguments are not pointers return !(Fail(t, "Both arguments must be pointers", msgAndArgs...)) } if same { return Fail(t, fmt.Sprintf( - "Expected and actual point to the same object: %p %#[1]v", - expected), msgAndArgs...) + "Expected and actual point to the same object: %p %#v", + expected, expected), msgAndArgs...) } return true } @@ -584,7 +549,7 @@ func NotSame(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} func samePointers(first, second interface{}) (same bool, ok bool) { firstPtr, secondPtr := reflect.ValueOf(first), reflect.ValueOf(second) if firstPtr.Kind() != reflect.Ptr || secondPtr.Kind() != reflect.Ptr { - return false, false // not both are pointers + return false, false //not both are pointers } firstType, secondType := reflect.TypeOf(first), reflect.TypeOf(second) @@ -645,6 +610,7 @@ func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interfa } return true + } // EqualExportedValues asserts that the types of two objects are equal and their public @@ -699,6 +665,7 @@ func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} } return Equal(t, expected, actual, msgAndArgs...) + } // NotNil asserts that the specified object is not nil. @@ -748,45 +715,37 @@ func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { // isEmpty gets whether the specified object is considered empty or not. func isEmpty(object interface{}) bool { + // get nil case out of the way if object == nil { return true } - return isEmptyValue(reflect.ValueOf(object)) -} + objValue := reflect.ValueOf(object) -// isEmptyValue gets whether the specified reflect.Value is considered empty or not. -func isEmptyValue(objValue reflect.Value) bool { - if objValue.IsZero() { - return true - } - // Special cases of non-zero values that we consider empty switch objValue.Kind() { // collection types are empty when they have no element - // Note: array types are empty when they match their zero-initialized state. case reflect.Chan, reflect.Map, reflect.Slice: return objValue.Len() == 0 - // non-nil pointers are empty if the value they point to is empty + // pointers are empty if nil or if the value they point to is empty case reflect.Ptr: - return isEmptyValue(objValue.Elem()) + if objValue.IsNil() { + return true + } + deref := objValue.Elem().Interface() + return isEmpty(deref) + // for all other types, compare against the zero value + // array types are empty when they match their zero-initialized state + default: + zero := reflect.Zero(objValue.Type()) + return reflect.DeepEqual(object, zero.Interface()) } - return false } -// Empty asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. 
// // assert.Empty(t, obj) -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := isEmpty(object) if !pass { @@ -797,9 +756,11 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { } return pass + } -// NotEmpty asserts that the specified object is NOT [Empty]. +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if assert.NotEmpty(t, obj) { // assert.Equal(t, "two", obj[1]) @@ -814,6 +775,7 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { } return pass + } // getLen tries to get the length of an object. @@ -857,6 +819,7 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { } return true + } // False asserts that the specified value is false. @@ -871,6 +834,7 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { } return true + } // NotEqual asserts that the specified values are NOT equal. @@ -893,6 +857,7 @@ func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{ } return true + } // NotEqualValues asserts that two objects are not equal even when converted to the same type @@ -915,6 +880,7 @@ func NotEqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...inte // return (true, false) if element was not found. // return (true, true) if element was found. func containsElement(list interface{}, element interface{}) (ok, found bool) { + listValue := reflect.ValueOf(list) listType := reflect.TypeOf(list) if listType == nil { @@ -949,6 +915,7 @@ func containsElement(list interface{}, element interface{}) (ok, found bool) { } } return true, false + } // Contains asserts that the specified string, list(array, slice...) or map contains the @@ -971,6 +938,7 @@ func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bo } return true + } // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the @@ -993,17 +961,14 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) } return true + } -// Subset asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subset asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. // // assert.Subset(t, [1, 2, 3], [1, 2]) // assert.Subset(t, {"x": 1, "y": 2}, {"x": 1}) -// assert.Subset(t, [1, 2, 3], {1: "one", 2: "two"}) -// assert.Subset(t, {"x": 1, "y": 2}, ["x"]) func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1018,7 +983,7 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok } subsetKind := reflect.TypeOf(subset).Kind() - if subsetKind != reflect.Array && subsetKind != reflect.Slice && subsetKind != reflect.Map { + if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) 
} @@ -1042,13 +1007,6 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok } subsetList := reflect.ValueOf(subset) - if subsetKind == reflect.Map { - keys := make([]interface{}, subsetList.Len()) - for idx, key := range subsetList.MapKeys() { - keys[idx] = key.Interface() - } - subsetList = reflect.ValueOf(keys) - } for i := 0; i < subsetList.Len(); i++ { element := subsetList.Index(i).Interface() ok, found := containsElement(list, element) @@ -1063,15 +1021,12 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok return true } -// NotSubset asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubset asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // assert.NotSubset(t, [1, 3, 4], [1, 2]) // assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3}) -// assert.NotSubset(t, [1, 3, 4], {1: "one", 2: "two"}) -// assert.NotSubset(t, {"x": 1, "y": 2}, ["z"]) func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1086,7 +1041,7 @@ func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) } subsetKind := reflect.TypeOf(subset).Kind() - if subsetKind != reflect.Array && subsetKind != reflect.Slice && subsetKind != reflect.Map { + if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) } @@ -1110,18 +1065,11 @@ func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) } subsetList := reflect.ValueOf(subset) - if subsetKind == reflect.Map { - keys := make([]interface{}, subsetList.Len()) - for idx, key := range subsetList.MapKeys() { - keys[idx] = key.Interface() - } - subsetList = reflect.ValueOf(keys) - } for i := 0; i < subsetList.Len(); i++ { element := subsetList.Index(i).Interface() ok, found := containsElement(list, element) if !ok { - return Fail(t, fmt.Sprintf("%q could not be applied builtin len()", list), msgAndArgs...) + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) } if !found { return true @@ -1643,8 +1591,10 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// assert.Error(t, err) +// actualObj, err := SomeFunction() +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) +// } func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { if err == nil { if h, ok := t.(tHelper); ok { @@ -1717,6 +1667,7 @@ func matchRegexp(rx interface{}, str interface{}) bool { default: return r.MatchString(fmt.Sprint(v)) } + } // Regexp asserts that a specified regexp matches a string. @@ -1752,6 +1703,7 @@ func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interf } return !match + } // Zero asserts that i is the zero value for its type. @@ -1862,11 +1814,6 @@ func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...) 
} - // Shortcut if same bytes - if actual == expected { - return true - } - if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil { return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...) } @@ -1885,11 +1832,6 @@ func YAMLEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid yaml.\nYAML parsing error: '%s'", expected, err.Error()), msgAndArgs...) } - // Shortcut if same bytes - if actual == expected { - return true - } - if err := yaml.Unmarshal([]byte(actual), &actualYAMLAsInterface); err != nil { return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid yaml.\nYAML error: '%s'", actual, err.Error()), msgAndArgs...) } @@ -1991,7 +1933,6 @@ func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick t } ch := make(chan bool, 1) - checkCond := func() { ch <- condition() } timer := time.NewTimer(waitFor) defer timer.Stop() @@ -1999,23 +1940,18 @@ func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick t ticker := time.NewTicker(tick) defer ticker.Stop() - var tickC <-chan time.Time - - // Check the condition once first on the initial call. - go checkCond() - - for { + for tick := ticker.C; ; { select { case <-timer.C: return Fail(t, "Condition never satisfied", msgAndArgs...) - case <-tickC: - tickC = nil - go checkCond() + case <-tick: + tick = nil + go func() { ch <- condition() }() case v := <-ch: if v { return true } - tickC = ticker.C + tick = ticker.C } } } @@ -2028,9 +1964,6 @@ type CollectT struct { errors []error } -// Helper is like [testing.T.Helper] but does nothing. -func (CollectT) Helper() {} - // Errorf collects the error. func (c *CollectT) Errorf(format string, args ...interface{}) { c.errors = append(c.errors, fmt.Errorf(format, args...)) @@ -2088,42 +2021,35 @@ func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time var lastFinishedTickErrs []error ch := make(chan *CollectT, 1) - checkCond := func() { - collect := new(CollectT) - defer func() { - ch <- collect - }() - condition(collect) - } - timer := time.NewTimer(waitFor) defer timer.Stop() ticker := time.NewTicker(tick) defer ticker.Stop() - var tickC <-chan time.Time - - // Check the condition once first on the initial call. - go checkCond() - - for { + for tick := ticker.C; ; { select { case <-timer.C: for _, err := range lastFinishedTickErrs { t.Errorf("%v", err) } return Fail(t, "Condition never satisfied", msgAndArgs...) - case <-tickC: - tickC = nil - go checkCond() + case <-tick: + tick = nil + go func() { + collect := new(CollectT) + defer func() { + ch <- collect + }() + condition(collect) + }() case collect := <-ch: if !collect.failed() { return true } // Keep the errors from the last ended condition, so that they can be copied to t if timeout is reached. lastFinishedTickErrs = collect.errors - tickC = ticker.C + tick = ticker.C } } } @@ -2138,7 +2064,6 @@ func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.D } ch := make(chan bool, 1) - checkCond := func() { ch <- condition() } timer := time.NewTimer(waitFor) defer timer.Stop() @@ -2146,23 +2071,18 @@ func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.D ticker := time.NewTicker(tick) defer ticker.Stop() - var tickC <-chan time.Time - - // Check the condition once first on the initial call. 
- go checkCond() - - for { + for tick := ticker.C; ; { select { case <-timer.C: return true - case <-tickC: - tickC = nil - go checkCond() + case <-tick: + tick = nil + go func() { ch <- condition() }() case v := <-ch: if v { return Fail(t, "Condition satisfied", msgAndArgs...) } - tickC = ticker.C + tick = ticker.C } } } @@ -2180,12 +2100,9 @@ func ErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { var expectedText string if target != nil { expectedText = target.Error() - if err == nil { - return Fail(t, fmt.Sprintf("Expected error with %q in chain but got nil.", expectedText), msgAndArgs...) - } } - chain := buildErrorChainString(err, false) + chain := buildErrorChainString(err) return Fail(t, fmt.Sprintf("Target error should be in err chain:\n"+ "expected: %q\n"+ @@ -2208,7 +2125,7 @@ func NotErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { expectedText = target.Error() } - chain := buildErrorChainString(err, false) + chain := buildErrorChainString(err) return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+ "found: %q\n"+ @@ -2226,17 +2143,11 @@ func ErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interface{ return true } - expectedType := reflect.TypeOf(target).Elem().String() - if err == nil { - return Fail(t, fmt.Sprintf("An error is expected but got nil.\n"+ - "expected: %s", expectedType), msgAndArgs...) - } - - chain := buildErrorChainString(err, true) + chain := buildErrorChainString(err) return Fail(t, fmt.Sprintf("Should be in error chain:\n"+ - "expected: %s\n"+ - "in chain: %s", expectedType, chain, + "expected: %q\n"+ + "in chain: %s", target, chain, ), msgAndArgs...) } @@ -2250,46 +2161,24 @@ func NotErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interfa return true } - chain := buildErrorChainString(err, true) + chain := buildErrorChainString(err) return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+ - "found: %s\n"+ - "in chain: %s", reflect.TypeOf(target).Elem().String(), chain, + "found: %q\n"+ + "in chain: %s", target, chain, ), msgAndArgs...) } -func unwrapAll(err error) (errs []error) { - errs = append(errs, err) - switch x := err.(type) { - case interface{ Unwrap() error }: - err = x.Unwrap() - if err == nil { - return - } - errs = append(errs, unwrapAll(err)...) - case interface{ Unwrap() []error }: - for _, err := range x.Unwrap() { - errs = append(errs, unwrapAll(err)...) - } - } - return -} - -func buildErrorChainString(err error, withType bool) string { +func buildErrorChainString(err error) string { if err == nil { return "" } - var chain string - errs := unwrapAll(err) - for i := range errs { - if i != 0 { - chain += "\n\t" - } - chain += fmt.Sprintf("%q", errs[i].Error()) - if withType { - chain += fmt.Sprintf(" (%T)", errs[i]) - } + e := errors.Unwrap(err) + chain := fmt.Sprintf("%q", err.Error()) + for e != nil { + chain += fmt.Sprintf("\n\t%q", e.Error()) + e = errors.Unwrap(e) } return chain } diff --git a/vendor/github.com/stretchr/testify/assert/doc.go b/vendor/github.com/stretchr/testify/assert/doc.go index a0b953aa..4953981d 100644 --- a/vendor/github.com/stretchr/testify/assert/doc.go +++ b/vendor/github.com/stretchr/testify/assert/doc.go @@ -1,9 +1,5 @@ // Package assert provides a set of comprehensive testing tools for use with the normal Go testing system. // -// # Note -// -// All functions in this package return a bool value indicating whether the assertion has passed. 
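The Eventually, EventuallyWithT, and Never hunks above all revert the same refactor: the newer code fires one condition check immediately through a shared `checkCond` closure and sets the ticker channel to nil while a check is in flight, whereas the restored loop waits a full tick before the first check. A self-contained sketch of the newer pattern, with illustrative names rather than testify's API:

```go
package main

import (
	"fmt"
	"time"
)

// pollUntil mirrors the polling shape removed above: one immediate check,
// and the ticker case disabled (nil channel) while a check is running.
func pollUntil(condition func() bool, waitFor, tick time.Duration) bool {
	ch := make(chan bool, 1)
	checkCond := func() { ch <- condition() }

	timer := time.NewTimer(waitFor)
	defer timer.Stop()
	ticker := time.NewTicker(tick)
	defer ticker.Stop()

	var tickC <-chan time.Time // nil until the first result arrives
	go checkCond()             // check once up front, not after the first tick

	for {
		select {
		case <-timer.C:
			return false // condition never satisfied
		case <-tickC:
			tickC = nil // pause ticking while this check is in flight
			go checkCond()
		case ok := <-ch:
			if ok {
				return true
			}
			tickC = ticker.C // resume ticking after a failed check
		}
	}
}

func main() {
	start := time.Now()
	ok := pollUntil(func() bool { return time.Since(start) > 50*time.Millisecond },
		time.Second, 10*time.Millisecond)
	fmt.Println("condition met:", ok)
}
```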
-// // # Example Usage // // The following is a complete example using assert in a standard test function: diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go index 5a6bb75f..861ed4b7 100644 --- a/vendor/github.com/stretchr/testify/assert/http_assertions.go +++ b/vendor/github.com/stretchr/testify/assert/http_assertions.go @@ -138,7 +138,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, contains := strings.Contains(body, fmt.Sprint(str)) if !contains { - Fail(t, fmt.Sprintf("Expected response body for %q to contain %q but found %q", url+"?"+values.Encode(), str, body), msgAndArgs...) + Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...) } return contains @@ -158,7 +158,7 @@ func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url strin contains := strings.Contains(body, fmt.Sprint(str)) if contains { - Fail(t, fmt.Sprintf("Expected response body for %q to NOT contain %q but found %q", url+"?"+values.Encode(), str, body), msgAndArgs...) + Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...) } return !contains diff --git a/vendor/github.com/stretchr/testify/assert/yaml/yaml_custom.go b/vendor/github.com/stretchr/testify/assert/yaml/yaml_custom.go index 5a74c4f4..baa0cc7d 100644 --- a/vendor/github.com/stretchr/testify/assert/yaml/yaml_custom.go +++ b/vendor/github.com/stretchr/testify/assert/yaml/yaml_custom.go @@ -1,4 +1,5 @@ //go:build testify_yaml_custom && !testify_yaml_fail && !testify_yaml_default +// +build testify_yaml_custom,!testify_yaml_fail,!testify_yaml_default // Package yaml is an implementation of YAML functions that calls a pluggable implementation. // diff --git a/vendor/github.com/stretchr/testify/assert/yaml/yaml_default.go b/vendor/github.com/stretchr/testify/assert/yaml/yaml_default.go index 0bae80e3..b83c6cf6 100644 --- a/vendor/github.com/stretchr/testify/assert/yaml/yaml_default.go +++ b/vendor/github.com/stretchr/testify/assert/yaml/yaml_default.go @@ -1,4 +1,5 @@ //go:build !testify_yaml_fail && !testify_yaml_custom +// +build !testify_yaml_fail,!testify_yaml_custom // Package yaml is just an indirection to handle YAML deserialization. // diff --git a/vendor/github.com/stretchr/testify/assert/yaml/yaml_fail.go b/vendor/github.com/stretchr/testify/assert/yaml/yaml_fail.go index 8041803f..e78f7dfe 100644 --- a/vendor/github.com/stretchr/testify/assert/yaml/yaml_fail.go +++ b/vendor/github.com/stretchr/testify/assert/yaml/yaml_fail.go @@ -1,4 +1,5 @@ //go:build testify_yaml_fail && !testify_yaml_custom && !testify_yaml_default +// +build testify_yaml_fail,!testify_yaml_custom,!testify_yaml_default // Package yaml is an implementation of YAML functions that always fail. // diff --git a/vendor/github.com/stretchr/testify/mock/mock.go b/vendor/github.com/stretchr/testify/mock/mock.go index 114fca61..eb5682df 100644 --- a/vendor/github.com/stretchr/testify/mock/mock.go +++ b/vendor/github.com/stretchr/testify/mock/mock.go @@ -208,16 +208,9 @@ func (c *Call) On(methodName string, arguments ...interface{}) *Call { return c.Parent.On(methodName, arguments...) } -// Unset removes all mock handlers that satisfy the call instance arguments from being -// called. Only supported on call instances with static input arguments. 
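On the HTTPBodyContains/HTTPBodyNotContains hunks above: `%q` and a hand-written `\"%s\"` are not interchangeable. `%q` emits a Go-escaped string (embedded quotes, newlines, and control characters are escaped), while `\"%s\"` only wraps the raw value in quotes. A quick illustration:

```go
package main

import "fmt"

func main() {
	body := `say "hi"`
	fmt.Printf("%q\n", body)     // prints: "say \"hi\""  (escaped, unambiguous)
	fmt.Printf("\"%s\"\n", body) // prints: "say "hi""    (raw, quotes leak through)
}
```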
+// Unset removes a mock handler from being called. // -// For example, the only handler remaining after the following would be "MyMethod(2, 2)": -// -// Mock. -// On("MyMethod", 2, 2).Return(0). -// On("MyMethod", 3, 3).Return(0). -// On("MyMethod", Anything, Anything).Return(0) -// Mock.On("MyMethod", 3, 3).Unset() +// test.On("func", mock.Anything).Unset() func (c *Call) Unset() *Call { var unlockOnce sync.Once @@ -338,10 +331,7 @@ func (m *Mock) TestData() objx.Map { Setting expectations */ -// Test sets the [TestingT] on which errors will be reported, otherwise errors -// will cause a panic. -// Test should not be called on an object that is going to be used in a -// goroutine other than the one running the test function. +// Test sets the test struct variable of the mock object func (m *Mock) Test(t TestingT) { m.mutex.Lock() defer m.mutex.Unlock() @@ -504,7 +494,7 @@ func (m *Mock) MethodCalled(methodName string, arguments ...interface{}) Argumen // expected call found, but it has already been called with repeatable times if call != nil { m.mutex.Unlock() - m.fail("\nassert: mock: The method has been called over %d times.\n\tEither do one more Mock.On(%#v).Return(...), or remove extra call.\n\tThis call was unexpected:\n\t\t%s\n\tat: %s", call.totalCalls, methodName, callString(methodName, arguments, true), assert.CallerInfo()) + m.fail("\nassert: mock: The method has been called over %d times.\n\tEither do one more Mock.On(\"%s\").Return(...), or remove extra call.\n\tThis call was unexpected:\n\t\t%s\n\tat: %s", call.totalCalls, methodName, callString(methodName, arguments, true), assert.CallerInfo()) } // we have to fail here - because we don't know what to do // as the return arguments. This is because: @@ -524,7 +514,7 @@ func (m *Mock) MethodCalled(methodName string, arguments ...interface{}) Argumen assert.CallerInfo(), ) } else { - m.fail("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(%#v).Return(...) first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", methodName, methodName, callString(methodName, arguments, true), assert.CallerInfo()) + m.fail("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(\"%s\").Return(...) first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", methodName, methodName, callString(methodName, arguments, true), assert.CallerInfo()) } } @@ -671,7 +661,7 @@ func (m *Mock) AssertNumberOfCalls(t TestingT, methodName string, expectedCalls actualCalls++ } } - return assert.Equal(t, expectedCalls, actualCalls, fmt.Sprintf("Expected number of calls (%d) of method %s does not match the actual number of calls (%d).", expectedCalls, methodName, actualCalls)) + return assert.Equal(t, expectedCalls, actualCalls, fmt.Sprintf("Expected number of calls (%d) does not match the actual number of calls (%d).", expectedCalls, actualCalls)) } // AssertCalled asserts that the method was called. @@ -948,8 +938,6 @@ func (args Arguments) Is(objects ...interface{}) bool { return true } -type outputRenderer func() string - // Diff gets a string describing the differences between the arguments // and the specified objects. 
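The Arguments.Diff hunk that follows swaps the newer lazy-rendering design for eager concatenation: upstream collects `outputRenderer` closures and only runs the fmt.Sprintf calls when a report is actually built, so passing comparisons cost almost nothing. A stripped-down sketch of that pattern, with illustrative names:

```go
package main

import (
	"fmt"
	"strings"
)

// renderer defers formatting until the report is requested, mirroring the
// outputRenderer slice removed in the hunk below.
type renderer func() string

func buildReport(renderers []renderer) string {
	var b strings.Builder
	b.WriteString("\n")
	for _, r := range renderers {
		b.WriteString(r())
	}
	return b.String()
}

func main() {
	var rs []renderer
	for i, v := range []int{1, 2, 3} {
		i, v := i, v // capture per iteration, as the removed `i := i` did
		rs = append(rs, func() string {
			return fmt.Sprintf("\t%d: PASS: (int=%d)\n", i, v)
		})
	}
	// Nothing has been formatted yet; the Sprintf calls run only here.
	fmt.Print(buildReport(rs))
}
```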
// @@ -957,7 +945,7 @@ type outputRenderer func() string func (args Arguments) Diff(objects []interface{}) (string, int) { // TODO: could return string as error and nil for No difference - var outputBuilder strings.Builder + output := "\n" var differences int maxArgCount := len(args) @@ -965,35 +953,24 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { maxArgCount = len(objects) } - outputRenderers := []outputRenderer{} - for i := 0; i < maxArgCount; i++ { - i := i var actual, expected interface{} - var actualFmt, expectedFmt func() string + var actualFmt, expectedFmt string if len(objects) <= i { actual = "(Missing)" - actualFmt = func() string { - return "(Missing)" - } + actualFmt = "(Missing)" } else { actual = objects[i] - actualFmt = func() string { - return fmt.Sprintf("(%[1]T=%[1]v)", actual) - } + actualFmt = fmt.Sprintf("(%[1]T=%[1]v)", actual) } if len(args) <= i { expected = "(Missing)" - expectedFmt = func() string { - return "(Missing)" - } + expectedFmt = "(Missing)" } else { expected = args[i] - expectedFmt = func() string { - return fmt.Sprintf("(%[1]T=%[1]v)", expected) - } + expectedFmt = fmt.Sprintf("(%[1]T=%[1]v)", expected) } if matcher, ok := expected.(argumentMatcher); ok { @@ -1001,22 +978,16 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { func() { defer func() { if r := recover(); r != nil { - actualFmt = func() string { - return fmt.Sprintf("panic in argument matcher: %v", r) - } + actualFmt = fmt.Sprintf("panic in argument matcher: %v", r) } }() matches = matcher.Matches(actual) }() if matches { - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: PASS: %s matched by %s\n", i, actualFmt(), matcher) - }) + output = fmt.Sprintf("%s\t%d: PASS: %s matched by %s\n", output, i, actualFmt, matcher) } else { differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: %s not matched by %s\n", i, actualFmt(), matcher) - }) + output = fmt.Sprintf("%s\t%d: FAIL: %s not matched by %s\n", output, i, actualFmt, matcher) } } else { switch expected := expected.(type) { @@ -1025,17 +996,13 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { if reflect.TypeOf(actual).Name() != string(expected) && reflect.TypeOf(actual).String() != string(expected) { // not match differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: type %s != type %s - %s\n", i, expected, reflect.TypeOf(actual).Name(), actualFmt()) - }) + output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected, reflect.TypeOf(actual).Name(), actualFmt) } case *IsTypeArgument: actualT := reflect.TypeOf(actual) if actualT != expected.t { differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: type %s != type %s - %s\n", i, expected.t.Name(), actualT.Name(), actualFmt()) - }) + output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected.t.Name(), actualT.Name(), actualFmt) } case *FunctionalOptionsArgument: var name string @@ -1046,36 +1013,26 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { const tName = "[]interface{}" if name != reflect.TypeOf(actual).String() && len(expected.values) != 0 { differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: type %s != type %s - %s\n", i, tName, reflect.TypeOf(actual).Name(), actualFmt()) - }) + output = fmt.Sprintf("%s\t%d: FAIL: 
type %s != type %s - %s\n", output, i, tName, reflect.TypeOf(actual).Name(), actualFmt) } else { if ef, af := assertOpts(expected.values, actual); ef == "" && af == "" { // match - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: PASS: %s == %s\n", i, tName, tName) - }) + output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, tName, tName) } else { // not match differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: %s != %s\n", i, af, ef) - }) + output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, af, ef) } } default: if assert.ObjectsAreEqual(expected, Anything) || assert.ObjectsAreEqual(actual, Anything) || assert.ObjectsAreEqual(actual, expected) { // match - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: PASS: %s == %s\n", i, actualFmt(), expectedFmt()) - }) + output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, actualFmt, expectedFmt) } else { // not match differences++ - outputRenderers = append(outputRenderers, func() string { - return fmt.Sprintf("\t%d: FAIL: %s != %s\n", i, actualFmt(), expectedFmt()) - }) + output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, actualFmt, expectedFmt) } } } @@ -1086,12 +1043,7 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { return "No differences.", differences } - outputBuilder.WriteString("\n") - for _, r := range outputRenderers { - outputBuilder.WriteString(r()) - } - - return outputBuilder.String(), differences + return output, differences } // Assert compares the arguments with the specified objects and fails if diff --git a/vendor/github.com/stretchr/testify/require/doc.go b/vendor/github.com/stretchr/testify/require/doc.go index c8e3f94a..96843472 100644 --- a/vendor/github.com/stretchr/testify/require/doc.go +++ b/vendor/github.com/stretchr/testify/require/doc.go @@ -23,8 +23,6 @@ // // The `require` package have same global functions as in the `assert` package, // but instead of returning a boolean result they call `t.FailNow()`. -// A consequence of this is that it must be called from the goroutine running -// the test function, not from other goroutines created during the test. // // Every assertion function also takes an optional string message as the final argument, // allowing custom error messages to be appended to the message the assertion method outputs. diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go index 2d02f9bc..d8921950 100644 --- a/vendor/github.com/stretchr/testify/require/require.go +++ b/vendor/github.com/stretchr/testify/require/require.go @@ -117,19 +117,10 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string t.FailNow() } -// Empty asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. 
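Both wordings of the Empty docs describe the same core behavior; the newer text being removed just spells out the pointer and array rules explicitly. A small test sketching values that satisfy Empty under either wording:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestEmptyCases(t *testing.T) {
	var p *int
	assert.Empty(t, p)           // nil pointer
	assert.Empty(t, [3]int{})    // array of zero values
	assert.Empty(t, "")          // empty string
	assert.Empty(t, 0)           // zero int
	assert.NotEmpty(t, []int{0}) // a slice of length 1 is not empty
}
```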
// // require.Empty(t, obj) -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -140,19 +131,10 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { t.FailNow() } -// Emptyf asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. // // require.Emptyf(t, obj, "error message %s", "formatted") -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -297,8 +279,10 @@ func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, ar // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// require.Error(t, err) +// actualObj, err := SomeFunction() +// if require.Error(t, err) { +// require.Equal(t, expectedError, err) +// } func Error(t TestingT, err error, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -389,8 +373,10 @@ func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// require.Errorf(t, err, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// if require.Errorf(t, err, "error message %s", "formatted") { +// require.Equal(t, expectedErrorf, err) +// } func Errorf(t TestingT, err error, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1111,35 +1097,7 @@ func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interf t.FailNow() } -// IsNotType asserts that the specified objects are not of the same type. -// -// require.IsNotType(t, &NotMyStruct{}, &MyStruct{}) -func IsNotType(t TestingT, theType interface{}, object interface{}, msgAndArgs ...interface{}) { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if assert.IsNotType(t, theType, object, msgAndArgs...) { - return - } - t.FailNow() -} - -// IsNotTypef asserts that the specified objects are not of the same type. -// -// require.IsNotTypef(t, &NotMyStruct{}, &MyStruct{}, "error message %s", "formatted") -func IsNotTypef(t TestingT, theType interface{}, object interface{}, msg string, args ...interface{}) { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if assert.IsNotTypef(t, theType, object, msg, args...) { - return - } - t.FailNow() -} - // IsType asserts that the specified objects are of the same type. -// -// require.IsType(t, &MyStruct{}, &MyStruct{}) func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1151,8 +1109,6 @@ func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs } // IsTypef asserts that the specified objects are of the same type. 
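One caveat on the Error and Errorf doc hunks in this file: the restored examples read `if require.Error(t, err) { ... }`, but the require variants return nothing and call t.FailNow() on failure, so that conditional pattern only compiles against the assert package. A sketch of the working require idiom (SomeFunction is a stand-in):

```go
package example_test

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

func SomeFunction() (string, error) { return "", errors.New("boom") }

func TestSomeFunction(t *testing.T) {
	_, err := SomeFunction()
	require.Error(t, err) // halts the test here if err is nil
	require.EqualError(t, err, "boom")
}
```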
-// -// require.IsTypef(t, &MyStruct{}, &MyStruct{}, "error message %s", "formatted") func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1513,7 +1469,8 @@ func NotElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg str t.FailNow() } -// NotEmpty asserts that the specified object is NOT [Empty]. +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if require.NotEmpty(t, obj) { // require.Equal(t, "two", obj[1]) @@ -1528,7 +1485,8 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { t.FailNow() } -// NotEmptyf asserts that the specified object is NOT [Empty]. +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if require.NotEmptyf(t, obj, "error message %s", "formatted") { // require.Equal(t, "two", obj[1]) @@ -1787,15 +1745,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, t.FailNow() } -// NotSubset asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubset asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // require.NotSubset(t, [1, 3, 4], [1, 2]) // require.NotSubset(t, {"x": 1, "y": 2}, {"z": 3}) -// require.NotSubset(t, [1, 3, 4], {1: "one", 2: "two"}) -// require.NotSubset(t, {"x": 1, "y": 2}, ["z"]) func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1806,15 +1761,12 @@ func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...i t.FailNow() } -// NotSubsetf asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubsetf asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // require.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted") // require.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -// require.NotSubsetf(t, [1, 3, 4], {1: "one", 2: "two"}, "error message %s", "formatted") -// require.NotSubsetf(t, {"x": 1, "y": 2}, ["z"], "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -2019,15 +1971,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg t.FailNow() } -// Subset asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subset asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. 
// // require.Subset(t, [1, 2, 3], [1, 2]) // require.Subset(t, {"x": 1, "y": 2}, {"x": 1}) -// require.Subset(t, [1, 2, 3], {1: "one", 2: "two"}) -// require.Subset(t, {"x": 1, "y": 2}, ["x"]) func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -2038,15 +1986,11 @@ func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...inte t.FailNow() } -// Subsetf asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subsetf asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. // // require.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted") // require.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -// require.Subsetf(t, [1, 2, 3], {1: "one", 2: "two"}, "error message %s", "formatted") -// require.Subsetf(t, {"x": 1, "y": 2}, ["x"], "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go index e6f7e944..1bd87304 100644 --- a/vendor/github.com/stretchr/testify/require/require_forward.go +++ b/vendor/github.com/stretchr/testify/require/require_forward.go @@ -93,19 +93,10 @@ func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg st ElementsMatchf(a.t, listA, listB, msg, args...) } -// Empty asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. // // a.Empty(obj) -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -113,19 +104,10 @@ func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { Empty(a.t, object, msgAndArgs...) } -// Emptyf asserts that the given value is "empty". -// -// [Zero values] are "empty". -// -// Arrays are "empty" if every element is the zero value of the type (stricter than "empty"). -// -// Slices, maps and channels with zero length are "empty". -// -// Pointer values are "empty" if the pointer is nil or if the pointed value is "empty". +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. // // a.Emptyf(obj, "error message %s", "formatted") -// -// [Zero values]: https://go.dev/ref/spec#The_zero_value func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -243,8 +225,10 @@ func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string // Error asserts that a function returned an error (i.e. not `nil`). 
// -// actualObj, err := SomeFunction() -// a.Error(err) +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } func (a *Assertions) Error(err error, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -314,8 +298,10 @@ func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...inter // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// a.Errorf(err, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -883,29 +869,7 @@ func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...in IsNonIncreasingf(a.t, object, msg, args...) } -// IsNotType asserts that the specified objects are not of the same type. -// -// a.IsNotType(&NotMyStruct{}, &MyStruct{}) -func (a *Assertions) IsNotType(theType interface{}, object interface{}, msgAndArgs ...interface{}) { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - IsNotType(a.t, theType, object, msgAndArgs...) -} - -// IsNotTypef asserts that the specified objects are not of the same type. -// -// a.IsNotTypef(&NotMyStruct{}, &MyStruct{}, "error message %s", "formatted") -func (a *Assertions) IsNotTypef(theType interface{}, object interface{}, msg string, args ...interface{}) { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - IsNotTypef(a.t, theType, object, msg, args...) -} - // IsType asserts that the specified objects are of the same type. -// -// a.IsType(&MyStruct{}, &MyStruct{}) func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -914,8 +878,6 @@ func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAnd } // IsTypef asserts that the specified objects are of the same type. -// -// a.IsTypef(&MyStruct{}, &MyStruct{}, "error message %s", "formatted") func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1201,7 +1163,8 @@ func (a *Assertions) NotElementsMatchf(listA interface{}, listB interface{}, msg NotElementsMatchf(a.t, listA, listB, msg, args...) } -// NotEmpty asserts that the specified object is NOT [Empty]. +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if a.NotEmpty(obj) { // assert.Equal(t, "two", obj[1]) @@ -1213,7 +1176,8 @@ func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) { NotEmpty(a.t, object, msgAndArgs...) } -// NotEmptyf asserts that the specified object is NOT [Empty]. +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. // // if a.NotEmptyf(obj, "error message %s", "formatted") { // assert.Equal(t, "two", obj[1]) @@ -1415,15 +1379,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri NotSamef(a.t, expected, actual, msg, args...) } -// NotSubset asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). 
-// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubset asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // a.NotSubset([1, 3, 4], [1, 2]) // a.NotSubset({"x": 1, "y": 2}, {"z": 3}) -// a.NotSubset([1, 3, 4], {1: "one", 2: "two"}) -// a.NotSubset({"x": 1, "y": 2}, ["z"]) func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1431,15 +1392,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs NotSubset(a.t, list, subset, msgAndArgs...) } -// NotSubsetf asserts that the list (array, slice, or map) does NOT contain all -// elements given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// NotSubsetf asserts that the specified list(array, slice...) or map does NOT +// contain all elements given in the specified subset list(array, slice...) or +// map. // // a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted") // a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -// a.NotSubsetf([1, 3, 4], {1: "one", 2: "two"}, "error message %s", "formatted") -// a.NotSubsetf({"x": 1, "y": 2}, ["z"], "error message %s", "formatted") func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1599,15 +1557,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, Samef(a.t, expected, actual, msg, args...) } -// Subset asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subset asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. // // a.Subset([1, 2, 3], [1, 2]) // a.Subset({"x": 1, "y": 2}, {"x": 1}) -// a.Subset([1, 2, 3], {1: "one", 2: "two"}) -// a.Subset({"x": 1, "y": 2}, ["x"]) func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1615,15 +1569,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ... Subset(a.t, list, subset, msgAndArgs...) } -// Subsetf asserts that the list (array, slice, or map) contains all elements -// given in the subset (array, slice, or map). -// Map elements are key-value pairs unless compared with an array or slice where -// only the map key is evaluated. +// Subsetf asserts that the specified list(array, slice...) or map contains all +// elements given in the specified subset list(array, slice...) or map. 
// // a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted") // a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -// a.Subsetf([1, 2, 3], {1: "one", 2: "two"}, "error message %s", "formatted") -// a.Subsetf({"x": 1, "y": 2}, ["x"], "error message %s", "formatted") func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/suite/stats.go b/vendor/github.com/stretchr/testify/suite/stats.go index be4ccd67..261da37f 100644 --- a/vendor/github.com/stretchr/testify/suite/stats.go +++ b/vendor/github.com/stretchr/testify/suite/stats.go @@ -16,30 +16,26 @@ type TestInformation struct { } func newSuiteInformation() *SuiteInformation { + testStats := make(map[string]*TestInformation) + return &SuiteInformation{ - TestStats: make(map[string]*TestInformation), + TestStats: testStats, } } -func (s *SuiteInformation) start(testName string) { - if s == nil { - return - } +func (s SuiteInformation) start(testName string) { s.TestStats[testName] = &TestInformation{ TestName: testName, Start: time.Now(), } } -func (s *SuiteInformation) end(testName string, passed bool) { - if s == nil { - return - } +func (s SuiteInformation) end(testName string, passed bool) { s.TestStats[testName].End = time.Now() s.TestStats[testName].Passed = passed } -func (s *SuiteInformation) Passed() bool { +func (s SuiteInformation) Passed() bool { for _, stats := range s.TestStats { if !stats.Passed { return false diff --git a/vendor/github.com/stretchr/testify/suite/suite.go b/vendor/github.com/stretchr/testify/suite/suite.go index 1b19be3b..18443a91 100644 --- a/vendor/github.com/stretchr/testify/suite/suite.go +++ b/vendor/github.com/stretchr/testify/suite/suite.go @@ -7,7 +7,6 @@ import ( "reflect" "regexp" "runtime/debug" - "strings" "sync" "testing" "time" @@ -16,6 +15,7 @@ import ( "github.com/stretchr/testify/require" ) +var allTestsFilter = func(_, _ string) (bool, error) { return true, nil } var matchMethod = flag.String("testify.m", "", "regular expression to select tests of the testify suite to run") // Suite is a basic testing suite with methods for storing and @@ -116,11 +116,6 @@ func (suite *Suite) Run(name string, subtest func()) bool { }) } -type test = struct { - name string - run func(t *testing.T) -} - // Run takes a testing suite and runs all of the tests attached // to it. 
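On the stats.go hunk above: the newer code uses pointer receivers with an `if s == nil` guard so that suites that don't implement WithStats can call start/end as harmless no-ops; the restored value receivers still mutate TestStats (maps are reference types), but a nil check is impossible on a value receiver, which is why the restored Run below wraps every stats call in `if stats != nil`. A minimal illustration of the nil-receiver guard:

```go
package main

import "fmt"

type stats struct{ m map[string]int }

// With a pointer receiver, invoking a method on a nil *stats is legal,
// and the guard turns the call into a no-op, as in the removed code.
func (s *stats) bump(k string) {
	if s == nil {
		return
	}
	s.m[k]++
}

func main() {
	var s *stats // nil: stats collection disabled
	s.bump("TestSomething")
	fmt.Println("no panic")

	s = &stats{m: map[string]int{}}
	s.bump("TestSomething")
	fmt.Println(s.m["TestSomething"]) // 1
}
```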
func Run(t *testing.T, suite TestingSuite) { @@ -129,39 +124,45 @@ func Run(t *testing.T, suite TestingSuite) { suite.SetT(t) suite.SetS(suite) + var suiteSetupDone bool + var stats *SuiteInformation if _, ok := suite.(WithStats); ok { stats = newSuiteInformation() } - var tests []test + tests := []testing.InternalTest{} methodFinder := reflect.TypeOf(suite) suiteName := methodFinder.Elem().Name() - var matchMethodRE *regexp.Regexp - if *matchMethod != "" { - var err error - matchMethodRE, err = regexp.Compile(*matchMethod) - if err != nil { - fmt.Fprintf(os.Stderr, "testify: invalid regexp for -m: %s\n", err) - os.Exit(1) - } - } - for i := 0; i < methodFinder.NumMethod(); i++ { method := methodFinder.Method(i) - if !strings.HasPrefix(method.Name, "Test") { - continue + ok, err := methodFilter(method.Name) + if err != nil { + fmt.Fprintf(os.Stderr, "testify: invalid regexp for -m: %s\n", err) + os.Exit(1) } - // Apply -testify.m filter - if matchMethodRE != nil && !matchMethodRE.MatchString(method.Name) { + + if !ok { continue } - test := test{ - name: method.Name, - run: func(t *testing.T) { + if !suiteSetupDone { + if stats != nil { + stats.Start = time.Now() + } + + if setupAllSuite, ok := suite.(SetupAllSuite); ok { + setupAllSuite.SetupSuite() + } + + suiteSetupDone = true + } + + test := testing.InternalTest{ + Name: method.Name, + F: func(t *testing.T) { parentT := suite.T() suite.SetT(t) defer recoverAndFailOnPanic(t) @@ -170,7 +171,10 @@ func Run(t *testing.T, suite TestingSuite) { r := recover() - stats.end(method.Name, !t.Failed() && r == nil) + if stats != nil { + passed := !t.Failed() && r == nil + stats.end(method.Name, passed) + } if afterTestSuite, ok := suite.(AfterTest); ok { afterTestSuite.AfterTest(suiteName, method.Name) @@ -191,47 +195,59 @@ func Run(t *testing.T, suite TestingSuite) { beforeTestSuite.BeforeTest(methodFinder.Elem().Name(), method.Name) } - stats.start(method.Name) + if stats != nil { + stats.start(method.Name) + } method.Func.Call([]reflect.Value{reflect.ValueOf(suite)}) }, } tests = append(tests, test) } + if suiteSetupDone { + defer func() { + if tearDownAllSuite, ok := suite.(TearDownAllSuite); ok { + tearDownAllSuite.TearDownSuite() + } - if len(tests) == 0 { - return + if suiteWithStats, measureStats := suite.(WithStats); measureStats { + stats.End = time.Now() + suiteWithStats.HandleStats(suiteName, stats) + } + }() } - if stats != nil { - stats.Start = time.Now() - } - - if setupAllSuite, ok := suite.(SetupAllSuite); ok { - setupAllSuite.SetupSuite() - } - - defer func() { - if tearDownAllSuite, ok := suite.(TearDownAllSuite); ok { - tearDownAllSuite.TearDownSuite() - } - - if suiteWithStats, measureStats := suite.(WithStats); measureStats { - stats.End = time.Now() - suiteWithStats.HandleStats(suiteName, stats) - } - }() - runTests(t, tests) } -func runTests(t *testing.T, tests []test) { +// Filtering method according to set regular expression +// specified command-line argument -m +func methodFilter(name string) (bool, error) { + if ok, _ := regexp.MatchString("^Test", name); !ok { + return false, nil + } + return regexp.MatchString(*matchMethod, name) +} + +func runTests(t testing.TB, tests []testing.InternalTest) { if len(tests) == 0 { t.Log("warning: no tests to run") return } + r, ok := t.(runner) + if !ok { // backwards compatibility with Go 1.6 and below + if !testing.RunTests(allTestsFilter, tests) { + t.Fail() + } + return + } + for _, test := range tests { - t.Run(test.name, test.run) + r.Run(test.Name, test.F) } } + +type runner 
interface { + Run(name string, f func(t *testing.T)) bool +} diff --git a/vendor/go.opentelemetry.io/otel/.clomonitor.yml b/vendor/go.opentelemetry.io/otel/.clomonitor.yml new file mode 100644 index 00000000..128d61a2 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/.clomonitor.yml @@ -0,0 +1,3 @@ +exemptions: + - check: artifacthub_badge + reason: "Artifact Hub doesn't support Go packages" diff --git a/vendor/go.opentelemetry.io/otel/.golangci.yml b/vendor/go.opentelemetry.io/otel/.golangci.yml index 888e5da8..5f69cc02 100644 --- a/vendor/go.opentelemetry.io/otel/.golangci.yml +++ b/vendor/go.opentelemetry.io/otel/.golangci.yml @@ -66,8 +66,6 @@ linters: desc: Do not use cross-module internal packages. - pkg: go.opentelemetry.io/otel/internal/internaltest desc: Do not use cross-module internal packages. - - pkg: go.opentelemetry.io/otel/internal/matchers - desc: Do not use cross-module internal packages. otlp-internal: files: - '!**/exporters/otlp/internal/**/*.go' @@ -190,6 +188,10 @@ linters: - legacy - std-error-handling rules: + - linters: + - revive + path: schema/v.*/types/.* + text: avoid meaningless package names # TODO: Having appropriate comments for exported objects helps development, # even for objects in internal packages. Appropriate comments for all # exported objects should be added and this exclusion removed. diff --git a/vendor/go.opentelemetry.io/otel/CHANGELOG.md b/vendor/go.opentelemetry.io/otel/CHANGELOG.md index 648e4aba..4acc7570 100644 --- a/vendor/go.opentelemetry.io/otel/CHANGELOG.md +++ b/vendor/go.opentelemetry.io/otel/CHANGELOG.md @@ -11,6 +11,61 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## [1.37.0/0.59.0/0.13.0] 2025-06-25 + +### Added + +- The `go.opentelemetry.io/otel/semconv/v1.33.0` package. + The package contains semantic conventions from the `v1.33.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.33.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.32.0.`(#6799) +- The `go.opentelemetry.io/otel/semconv/v1.34.0` package. + The package contains semantic conventions from the `v1.34.0` version of the OpenTelemetry Semantic Conventions. (#6812) +- Add metric's schema URL as `otel_scope_schema_url` label in `go.opentelemetry.io/otel/exporters/prometheus`. (#5947) +- Add metric's scope attributes as `otel_scope_[attribute]` labels in `go.opentelemetry.io/otel/exporters/prometheus`. (#5947) +- Add `EventName` to `EnabledParameters` in `go.opentelemetry.io/otel/log`. (#6825) +- Add `EventName` to `EnabledParameters` in `go.opentelemetry.io/otel/sdk/log`. (#6825) +- Changed handling of `go.opentelemetry.io/otel/exporters/prometheus` metric renaming to add unit suffixes when it doesn't match one of the pre-defined values in the unit suffix map. (#6839) + +### Changed + +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/bridge/opentracing`. (#6827) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/exporters/zipkin`. (#6829) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/metric`. (#6832) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/sdk/resource`. (#6834) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/sdk/trace`. 
(#6835) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/trace`. (#6836) +- `Record.Resource` now returns `*resource.Resource` instead of `resource.Resource` in `go.opentelemetry.io/otel/sdk/log`. (#6864) +- Retry now shows error cause for context timeout in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`, `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`, `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6898) + +### Fixed + +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`. (#6710) +- Validate exponential histogram scale range for Prometheus compatibility in `go.opentelemetry.io/otel/exporters/prometheus`. (#6822) +- Context cancellation during metric pipeline produce does not corrupt data in `go.opentelemetry.io/otel/sdk/metric`. (#6914) + +### Removed + +- `go.opentelemetry.io/otel/exporters/prometheus` no longer exports `otel_scope_info` metric. (#6770) + +## [0.12.2] 2025-05-22 + +### Fixed + +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc` module that contains invalid dependencies. (#6804) +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp` module that contains invalid dependencies. (#6804) +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/stdout/stdoutlog` module that contains invalid dependencies. (#6804) + +## [0.12.1] 2025-05-21 + +### Fixes + +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc`. (#6800) +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6800) +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/stdout/stdoutlog`. (#6800) + ## [1.36.0/0.58.0/0.12.0] 2025-05-20 ### Added @@ -3288,7 +3343,10 @@ It contains api and sdk for trace and meter. - CircleCI build CI manifest files. - CODEOWNERS file to track owners of this project. 
-[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.36.0...HEAD +[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.37.0...HEAD +[1.37.0/0.59.0/0.13.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.37.0 +[0.12.2]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/log/v0.12.2 +[0.12.1]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/log/v0.12.1 [1.36.0/0.58.0/0.12.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.36.0 [1.35.0/0.57.0/0.11.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.35.0 [1.34.0/0.56.0/0.10.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.34.0 diff --git a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md index 1902dac0..f9ddc281 100644 --- a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md +++ b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md @@ -109,10 +109,9 @@ A PR is considered **ready to merge** when: This is not enforced through automation, but needs to be validated by the maintainer merging. - * The qualified approvals need to be from [Approver]s/[Maintainer]s - affiliated with different companies. Two qualified approvals from - [Approver]s or [Maintainer]s affiliated with the same company counts as a - single qualified approval. + * At least one of the qualified approvals need to be from an + [Approver]/[Maintainer] affiliated with a different company than the author + of the PR. * PRs introducing changes that have already been discussed and consensus reached only need one qualified approval. The discussion and resolution needs to be linked to the PR. @@ -650,11 +649,11 @@ should be canceled. ### Maintainers -- [Damien Mathieu](https://github.com/dmathieu), Elastic -- [David Ashpole](https://github.com/dashpole), Google -- [Robert Pająk](https://github.com/pellared), Splunk -- [Sam Xie](https://github.com/XSAM), Cisco/AppDynamics -- [Tyler Yahn](https://github.com/MrAlias), Splunk +- [Damien Mathieu](https://github.com/dmathieu), Elastic ([GPG](https://keys.openpgp.org/search?q=5A126B972A81A6CE443E5E1B408B8E44F0873832)) +- [David Ashpole](https://github.com/dashpole), Google ([GPG](https://keys.openpgp.org/search?q=C0D1BDDCAAEAE573673085F176327DA4D864DC70)) +- [Robert Pająk](https://github.com/pellared), Splunk ([GPG](https://keys.openpgp.org/search?q=CDAD3A60476A3DE599AA5092E5F7C35A4DBE90C2)) +- [Sam Xie](https://github.com/XSAM), Splunk ([GPG](https://keys.openpgp.org/search?q=AEA033782371ABB18EE39188B8044925D6FEEBEA)) +- [Tyler Yahn](https://github.com/MrAlias), Splunk ([GPG](https://keys.openpgp.org/search?q=0x46B0F3E1A8B1BA5A)) ### Emeritus diff --git a/vendor/go.opentelemetry.io/otel/Makefile b/vendor/go.opentelemetry.io/otel/Makefile index 62a56f4d..4fa423ca 100644 --- a/vendor/go.opentelemetry.io/otel/Makefile +++ b/vendor/go.opentelemetry.io/otel/Makefile @@ -293,7 +293,7 @@ semconv-generate: $(SEMCONVKIT) --param tag=$(TAG) \ go \ /home/weaver/target - $(SEMCONVKIT) -output "$(SEMCONVPKG)/$(TAG)" -tag "$(TAG)" + $(SEMCONVKIT) -semconv "$(SEMCONVPKG)" -tag "$(TAG)" .PHONY: gorelease gorelease: $(OTEL_GO_MOD_DIRS:%=gorelease/%) diff --git a/vendor/go.opentelemetry.io/otel/README.md b/vendor/go.opentelemetry.io/otel/README.md index b6007881..5fa1b75c 100644 --- a/vendor/go.opentelemetry.io/otel/README.md +++ b/vendor/go.opentelemetry.io/otel/README.md @@ -7,6 +7,7 @@ [![OpenSSF 
Scorecard](https://api.scorecard.dev/projects/github.com/open-telemetry/opentelemetry-go/badge)](https://scorecard.dev/viewer/?uri=github.com/open-telemetry/opentelemetry-go) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9996/badge)](https://www.bestpractices.dev/projects/9996) [![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/opentelemetry-go.svg)](https://issues.oss-fuzz.com/issues?q=project:opentelemetry-go) +[![FOSSA Status](https://app.fossa.com/api/projects/custom%2B162%2Fgithub.com%2Fopen-telemetry%2Fopentelemetry-go.svg?type=shield&issueType=license)](https://app.fossa.com/projects/custom%2B162%2Fgithub.com%2Fopen-telemetry%2Fopentelemetry-go?ref=badge_shield&issueType=license) [![Slack](https://img.shields.io/badge/slack-@cncf/otel--go-brightgreen.svg?logo=slack)](https://cloud-native.slack.com/archives/C01NPAXACKT) OpenTelemetry-Go is the [Go](https://golang.org/) implementation of [OpenTelemetry](https://opentelemetry.io/). diff --git a/vendor/go.opentelemetry.io/otel/RELEASING.md b/vendor/go.opentelemetry.io/otel/RELEASING.md index 7c1a9119..1ddcdef0 100644 --- a/vendor/go.opentelemetry.io/otel/RELEASING.md +++ b/vendor/go.opentelemetry.io/otel/RELEASING.md @@ -112,6 +112,29 @@ It is critical you make sure the version you push upstream is correct. Finally create a Release for the new `` on GitHub. The release body should include all the release notes from the Changelog for this release. +### Sign the Release Artifact + +To ensure we comply with CNCF best practices, we need to sign the release artifact. +The tarball attached to the GitHub release needs to be signed with your GPG key. + +Follow [these steps] to sign the release artifact and upload it to GitHub. +You can use [this script] to verify the contents of the tarball before signing it. + +Be sure to use the correct GPG key when signing the release artifact. + +```terminal +gpg --local-user --armor --detach-sign opentelemetry-go-.tar.gz +``` + +You can verify the signature with: + +```terminal +gpg --verify opentelemetry-go-.tar.gz.asc opentelemetry-go-.tar.gz +``` + +[these steps]: https://wiki.debian.org/Creating%20signed%20GitHub%20releases +[this script]: https://github.com/MrAlias/attest-sh + ## Post-Release ### Contrib Repository diff --git a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile index 51fb76b3..935bd487 100644 --- a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile +++ b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile @@ -1,4 +1,4 @@ # This is a renovate-friendly source of Docker images. 
-FROM python:3.13.3-slim-bullseye@sha256:9e3f9243e06fd68eb9519074b49878eda20ad39a855fac51aaffb741de20726e AS python -FROM otel/weaver:v0.15.0@sha256:1cf1c72eaed57dad813c2e359133b8a15bd4facf305aae5b13bdca6d3eccff56 AS weaver +FROM python:3.13.5-slim-bullseye@sha256:5b9fc0d8ef79cfb5f300e61cb516e0c668067bbf77646762c38c94107e230dbc AS python +FROM otel/weaver:v0.15.2@sha256:b13acea09f721774daba36344861f689ac4bb8d6ecd94c4600b4d590c8fb34b9 AS weaver FROM avtodev/markdown-lint:v1@sha256:6aeedc2f49138ce7a1cd0adffc1b1c0321b841dc2102408967d9301c031949ee AS markdown diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md new file mode 100644 index 00000000..02b56115 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md @@ -0,0 +1,4 @@ + +# Migration from v1.33.0 to v1.34.0 + +The `go.opentelemetry.io/otel/semconv/v1.34.0` package should be a drop-in replacement for `go.opentelemetry.io/otel/semconv/v1.33.0`. diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md new file mode 100644 index 00000000..fab06c97 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md @@ -0,0 +1,3 @@ +# Semconv v1.34.0 + +[![PkgGoDev](https://pkg.go.dev/badge/go.opentelemetry.io/otel/semconv/v1.34.0)](https://pkg.go.dev/go.opentelemetry.io/otel/semconv/v1.34.0) diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go new file mode 100644 index 00000000..5b566625 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go @@ -0,0 +1,13851 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// Code generated from semantic convention specification. DO NOT EDIT. + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +import "go.opentelemetry.io/otel/attribute" + +// Namespace: android +const ( + // AndroidAppStateKey is the attribute Key conforming to the "android.app.state" + // semantic conventions. It represents the this attribute represents the state + // of the application. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "created" + // Note: The Android lifecycle states are defined in + // [Activity lifecycle callbacks], and from which the `OS identifiers` are + // derived. + // + // [Activity lifecycle callbacks]: https://developer.android.com/guide/components/activities/activity-lifecycle#lc + AndroidAppStateKey = attribute.Key("android.app.state") + + // AndroidOSAPILevelKey is the attribute Key conforming to the + // "android.os.api_level" semantic conventions. It represents the uniquely + // identifies the framework API revision offered by a version (`os.version`) of + // the android operating system. More information can be found [here]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "33", "32" + // + // [here]: https://developer.android.com/guide/topics/manifest/uses-sdk-element#ApiLevels + AndroidOSAPILevelKey = attribute.Key("android.os.api_level") +) + +// AndroidOSAPILevel returns an attribute KeyValue conforming to the +// "android.os.api_level" semantic conventions. It represents the uniquely +// identifies the framework API revision offered by a version (`os.version`) of +// the android operating system. 
More information can be found [here]. +// +// [here]: https://developer.android.com/guide/topics/manifest/uses-sdk-element#ApiLevels +func AndroidOSAPILevel(val string) attribute.KeyValue { + return AndroidOSAPILevelKey.String(val) +} + +// Enum values for android.app.state +var ( + // Any time before Activity.onResume() or, if the app has no Activity, + // Context.startService() has been called in the app for the first time. + // + // Stability: development + AndroidAppStateCreated = AndroidAppStateKey.String("created") + // Any time after Activity.onPause() or, if the app has no Activity, + // Context.stopService() has been called when the app was in the foreground + // state. + // + // Stability: development + AndroidAppStateBackground = AndroidAppStateKey.String("background") + // Any time after Activity.onResume() or, if the app has no Activity, + // Context.startService() has been called when the app was in either the created + // or background states. + // + // Stability: development + AndroidAppStateForeground = AndroidAppStateKey.String("foreground") +) + +// Namespace: app +const ( + // AppInstallationIDKey is the attribute Key conforming to the + // "app.installation.id" semantic conventions. It represents a unique identifier + // representing the installation of an application on a specific device. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2ab2916d-a51f-4ac8-80ee-45ac31a28092" + // Note: Its value SHOULD persist across launches of the same application + // installation, including through application upgrades. + // It SHOULD change if the application is uninstalled or if all applications of + // the vendor are uninstalled. + // Additionally, users might be able to reset this value (e.g. by clearing + // application data). + // If an app is installed multiple times on the same device (e.g. in different + // accounts on Android), each `app.installation.id` SHOULD have a different + // value. + // If multiple OpenTelemetry SDKs are used within the same application, they + // SHOULD use the same value for `app.installation.id`. + // Hardware IDs (e.g. serial number, IMEI, MAC address) MUST NOT be used as the + // `app.installation.id`. + // + // For iOS, this value SHOULD be equal to the [vendor identifier]. + // + // For Android, examples of `app.installation.id` implementations include: + // + // - [Firebase Installation ID]. + // - A globally unique UUID which is persisted across sessions in your + // application. + // - [App set ID]. + // - [`Settings.getString(Settings.Secure.ANDROID_ID)`]. + // + // More information about Android identifier best practices can be found [here]. + // + // [vendor identifier]: https://developer.apple.com/documentation/uikit/uidevice/identifierforvendor + // [Firebase Installation ID]: https://firebase.google.com/docs/projects/manage-installations + // [App set ID]: https://developer.android.com/identity/app-set-id + // [`Settings.getString(Settings.Secure.ANDROID_ID)`]: https://developer.android.com/reference/android/provider/Settings.Secure#ANDROID_ID + // [here]: https://developer.android.com/training/articles/user-data-ids + AppInstallationIDKey = attribute.Key("app.installation.id") + + // AppScreenCoordinateXKey is the attribute Key conforming to the + // "app.screen.coordinate.x" semantic conventions. It represents the x + // (horizontal) coordinate of a screen coordinate, in screen pixels.
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 131 + AppScreenCoordinateXKey = attribute.Key("app.screen.coordinate.x") + + // AppScreenCoordinateYKey is the attribute Key conforming to the + // "app.screen.coordinate.y" semantic conventions. It represents the y + // (vertical) component of a screen coordinate, in screen pixels. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12, 99 + AppScreenCoordinateYKey = attribute.Key("app.screen.coordinate.y") + + // AppWidgetIDKey is the attribute Key conforming to the "app.widget.id" + // semantic conventions. It represents an identifier that uniquely + // differentiates this widget from other widgets in the same application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "f9bc787d-ff05-48ad-90e1-fca1d46130b3", "submit_order_1829" + // Note: A widget is an application component, typically an on-screen visual GUI + // element. + AppWidgetIDKey = attribute.Key("app.widget.id") + + // AppWidgetNameKey is the attribute Key conforming to the "app.widget.name" + // semantic conventions. It represents the name of an application widget. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "submit", "attack", "Clear Cart" + // Note: A widget is an application component, typically an on-screen visual GUI + // element. + AppWidgetNameKey = attribute.Key("app.widget.name") +) + +// AppInstallationID returns an attribute KeyValue conforming to the +// "app.installation.id" semantic conventions. It represents a unique identifier +// representing the installation of an application on a specific device. +func AppInstallationID(val string) attribute.KeyValue { + return AppInstallationIDKey.String(val) +} + +// AppScreenCoordinateX returns an attribute KeyValue conforming to the +// "app.screen.coordinate.x" semantic conventions. It represents the x +// (horizontal) coordinate of a screen coordinate, in screen pixels. +func AppScreenCoordinateX(val int) attribute.KeyValue { + return AppScreenCoordinateXKey.Int(val) +} + +// AppScreenCoordinateY returns an attribute KeyValue conforming to the +// "app.screen.coordinate.y" semantic conventions. It represents the y (vertical) +// component of a screen coordinate, in screen pixels. +func AppScreenCoordinateY(val int) attribute.KeyValue { + return AppScreenCoordinateYKey.Int(val) +} + +// AppWidgetID returns an attribute KeyValue conforming to the "app.widget.id" +// semantic conventions. It represents an identifier that uniquely differentiates +// this widget from other widgets in the same application. +func AppWidgetID(val string) attribute.KeyValue { + return AppWidgetIDKey.String(val) +} + +// AppWidgetName returns an attribute KeyValue conforming to the +// "app.widget.name" semantic conventions. It represents the name of an +// application widget. +func AppWidgetName(val string) attribute.KeyValue { + return AppWidgetNameKey.String(val) +} + +// Namespace: artifact +const ( + // ArtifactAttestationFilenameKey is the attribute Key conforming to the + // "artifact.attestation.filename" semantic conventions. It represents the + // provenance filename of the built attestation which directly relates to the + // build artifact filename. This filename SHOULD accompany the artifact at + // publish time. See the [SLSA Relationship] specification for more information. 
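+ // (A minimal illustrative sketch, not part of the generated text: a release + // build span could pair semconv.ArtifactFilename("release-1.tar.gz") with + // semconv.ArtifactAttestationFilename("release-1.tar.gz.attestation").)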
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "golang-binary-amd64-v0.1.0.attestation", + // "docker-image-amd64-v0.1.0.intoto.json1", "release-1.tar.gz.attestation", + // "file-name-package.tar.gz.intoto.json1" + // + // [SLSA Relationship]: https://slsa.dev/spec/v1.0/distributing-provenance#relationship-between-artifacts-and-attestations + ArtifactAttestationFilenameKey = attribute.Key("artifact.attestation.filename") + + // ArtifactAttestationHashKey is the attribute Key conforming to the + // "artifact.attestation.hash" semantic conventions. It represents the full + // [hash value (see glossary)] of the built attestation. Some envelopes in the + // [software attestation space] also refer to this as the **digest**. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1b31dfcd5b7f9267bf2ff47651df1cfb9147b9e4df1f335accf65b4cda498408" + // + // [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + // [software attestation space]: https://github.com/in-toto/attestation/tree/main/spec + ArtifactAttestationHashKey = attribute.Key("artifact.attestation.hash") + + // ArtifactAttestationIDKey is the attribute Key conforming to the + // "artifact.attestation.id" semantic conventions. It represents the id of the + // build [software attestation]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "123" + // + // [software attestation]: https://slsa.dev/attestation-model + ArtifactAttestationIDKey = attribute.Key("artifact.attestation.id") + + // ArtifactFilenameKey is the attribute Key conforming to the + // "artifact.filename" semantic conventions. It represents the human-readable + // file name of the artifact, typically generated during build and release + // processes. Often includes the package name and version in the file name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "golang-binary-amd64-v0.1.0", "docker-image-amd64-v0.1.0", + // "release-1.tar.gz", "file-name-package.tar.gz" + // Note: This file name can also act as the [Package Name] + // in cases where the package ecosystem maps accordingly. + // Additionally, the artifact [can be published] + // for others, but that is not a guarantee. + // + // [Package Name]: https://slsa.dev/spec/v1.0/terminology#package-model + // [can be published]: https://slsa.dev/spec/v1.0/terminology#software-supply-chain + ArtifactFilenameKey = attribute.Key("artifact.filename") + + // ArtifactHashKey is the attribute Key conforming to the "artifact.hash" + // semantic conventions. It represents the full [hash value (see glossary)], + // often found in checksum.txt on a release of the artifact and used to verify + // package integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9ff4c52759e2c4ac70b7d517bc7fcdc1cda631ca0045271ddd1b192544f8a3e9" + // Note: The specific algorithm used to create the cryptographic hash value is + // not defined. In situations where an artifact has multiple + // cryptographic hashes, it is up to the implementer to choose which + // hash value to set here; this should be the most secure hash algorithm + // that is suitable for the situation and consistent with the + // corresponding attestation.
The implementer can then provide the other + // hash values through an additional set of attribute extensions as they + // deem necessary. + // + // [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + ArtifactHashKey = attribute.Key("artifact.hash") + + // ArtifactPurlKey is the attribute Key conforming to the "artifact.purl" + // semantic conventions. It represents the [Package URL] of the + // [package artifact], which provides a standard way to identify and locate the + // packaged artifact. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pkg:github/package-url/purl-spec@1209109710924", + // "pkg:npm/foo@12.12.3" + // + // [Package URL]: https://github.com/package-url/purl-spec + // [package artifact]: https://slsa.dev/spec/v1.0/terminology#package-model + ArtifactPurlKey = attribute.Key("artifact.purl") + + // ArtifactVersionKey is the attribute Key conforming to the "artifact.version" + // semantic conventions. It represents the version of the artifact. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "v0.1.0", "1.2.1", "122691-build" + ArtifactVersionKey = attribute.Key("artifact.version") +) + +// ArtifactAttestationFilename returns an attribute KeyValue conforming to the +// "artifact.attestation.filename" semantic conventions. It represents the +// provenance filename of the built attestation which directly relates to the +// build artifact filename. This filename SHOULD accompany the artifact at +// publish time. See the [SLSA Relationship] specification for more information. +// +// [SLSA Relationship]: https://slsa.dev/spec/v1.0/distributing-provenance#relationship-between-artifacts-and-attestations +func ArtifactAttestationFilename(val string) attribute.KeyValue { + return ArtifactAttestationFilenameKey.String(val) +} + +// ArtifactAttestationHash returns an attribute KeyValue conforming to the +// "artifact.attestation.hash" semantic conventions. It represents the full +// [hash value (see glossary)] of the built attestation. Some envelopes in the +// [software attestation space] also refer to this as the **digest**. +// +// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf +// [software attestation space]: https://github.com/in-toto/attestation/tree/main/spec +func ArtifactAttestationHash(val string) attribute.KeyValue { + return ArtifactAttestationHashKey.String(val) +} + +// ArtifactAttestationID returns an attribute KeyValue conforming to the +// "artifact.attestation.id" semantic conventions. It represents the id of the +// build [software attestation]. +// +// [software attestation]: https://slsa.dev/attestation-model +func ArtifactAttestationID(val string) attribute.KeyValue { + return ArtifactAttestationIDKey.String(val) +} + +// ArtifactFilename returns an attribute KeyValue conforming to the +// "artifact.filename" semantic conventions. It represents the human-readable +// file name of the artifact, typically generated during build and release +// processes. Often includes the package name and version in the file name. +func ArtifactFilename(val string) attribute.KeyValue { + return ArtifactFilenameKey.String(val) +} + +// ArtifactHash returns an attribute KeyValue conforming to the "artifact.hash" +// semantic conventions. It represents the full [hash value (see glossary)], +// often found in checksum.txt on a release of the artifact and used to verify +// package integrity.
+// +// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf +func ArtifactHash(val string) attribute.KeyValue { + return ArtifactHashKey.String(val) +} + +// ArtifactPurl returns an attribute KeyValue conforming to the "artifact.purl" +// semantic conventions. It represents the [Package URL] of the +// [package artifact], which provides a standard way to identify and locate the +// packaged artifact. +// +// [Package URL]: https://github.com/package-url/purl-spec +// [package artifact]: https://slsa.dev/spec/v1.0/terminology#package-model +func ArtifactPurl(val string) attribute.KeyValue { + return ArtifactPurlKey.String(val) +} + +// ArtifactVersion returns an attribute KeyValue conforming to the +// "artifact.version" semantic conventions. It represents the version of the +// artifact. +func ArtifactVersion(val string) attribute.KeyValue { + return ArtifactVersionKey.String(val) +} + +// Namespace: aws +const ( + // AWSBedrockGuardrailIDKey is the attribute Key conforming to the + // "aws.bedrock.guardrail.id" semantic conventions. It represents the unique + // identifier of the AWS Bedrock Guardrail. A [guardrail] helps safeguard and + // prevent unwanted behavior from model responses or user messages. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "sgi5gkybzqak" + // + // [guardrail]: https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails.html + AWSBedrockGuardrailIDKey = attribute.Key("aws.bedrock.guardrail.id") + + // AWSBedrockKnowledgeBaseIDKey is the attribute Key conforming to the + // "aws.bedrock.knowledge_base.id" semantic conventions. It represents the + // unique identifier of the AWS Bedrock Knowledge base. A [knowledge base] is a + // bank of information that can be queried by models to generate more relevant + // responses and augment prompts. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "XFWUPB9PAW" + // + // [knowledge base]: https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html + AWSBedrockKnowledgeBaseIDKey = attribute.Key("aws.bedrock.knowledge_base.id") + + // AWSDynamoDBAttributeDefinitionsKey is the attribute Key conforming to the + // "aws.dynamodb.attribute_definitions" semantic conventions. It represents the + // JSON-serialized value of each item in the `AttributeDefinitions` request + // field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "AttributeName": "string", "AttributeType": "string" }" + AWSDynamoDBAttributeDefinitionsKey = attribute.Key("aws.dynamodb.attribute_definitions") + + // AWSDynamoDBAttributesToGetKey is the attribute Key conforming to the + // "aws.dynamodb.attributes_to_get" semantic conventions. It represents the + // value of the `AttributesToGet` request parameter. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "lives", "id" + AWSDynamoDBAttributesToGetKey = attribute.Key("aws.dynamodb.attributes_to_get") + + // AWSDynamoDBConsistentReadKey is the attribute Key conforming to the + // "aws.dynamodb.consistent_read" semantic conventions. It represents the value + // of the `ConsistentRead` request parameter.
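+ // (A minimal illustrative sketch, not generated text: a strongly consistent + // DynamoDB GetItem call would be recorded as + // semconv.AWSDynamoDBConsistentRead(true).)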
+ // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AWSDynamoDBConsistentReadKey = attribute.Key("aws.dynamodb.consistent_read") + + // AWSDynamoDBConsumedCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.consumed_capacity" semantic conventions. It represents the + // JSON-serialized value of each item in the `ConsumedCapacity` response field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "CapacityUnits": number, "GlobalSecondaryIndexes": { "string" : + // { "CapacityUnits": number, "ReadCapacityUnits": number, "WriteCapacityUnits": + // number } }, "LocalSecondaryIndexes": { "string" : { "CapacityUnits": number, + // "ReadCapacityUnits": number, "WriteCapacityUnits": number } }, + // "ReadCapacityUnits": number, "Table": { "CapacityUnits": number, + // "ReadCapacityUnits": number, "WriteCapacityUnits": number }, "TableName": + // "string", "WriteCapacityUnits": number }" + AWSDynamoDBConsumedCapacityKey = attribute.Key("aws.dynamodb.consumed_capacity") + + // AWSDynamoDBCountKey is the attribute Key conforming to the + // "aws.dynamodb.count" semantic conventions. It represents the value of the + // `Count` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBCountKey = attribute.Key("aws.dynamodb.count") + + // AWSDynamoDBExclusiveStartTableKey is the attribute Key conforming to the + // "aws.dynamodb.exclusive_start_table" semantic conventions. It represents the + // value of the `ExclusiveStartTableName` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Users", "CatsTable" + AWSDynamoDBExclusiveStartTableKey = attribute.Key("aws.dynamodb.exclusive_start_table") + + // AWSDynamoDBGlobalSecondaryIndexUpdatesKey is the attribute Key conforming to + // the "aws.dynamodb.global_secondary_index_updates" semantic conventions. It + // represents the JSON-serialized value of each item in the + // `GlobalSecondaryIndexUpdates` request field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "Create": { "IndexName": "string", "KeySchema": [ { + // "AttributeName": "string", "KeyType": "string" } ], "Projection": { + // "NonKeyAttributes": [ "string" ], "ProjectionType": "string" }, + // "ProvisionedThroughput": { "ReadCapacityUnits": number, "WriteCapacityUnits": + // number } }" + AWSDynamoDBGlobalSecondaryIndexUpdatesKey = attribute.Key("aws.dynamodb.global_secondary_index_updates") + + // AWSDynamoDBGlobalSecondaryIndexesKey is the attribute Key conforming to the + // "aws.dynamodb.global_secondary_indexes" semantic conventions. It represents + // the JSON-serialized value of each item of the `GlobalSecondaryIndexes` + // request field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "IndexName": "string", "KeySchema": [ { "AttributeName": + // "string", "KeyType": "string" } ], "Projection": { "NonKeyAttributes": [ + // "string" ], "ProjectionType": "string" }, "ProvisionedThroughput": { + // "ReadCapacityUnits": number, "WriteCapacityUnits": number } }" + AWSDynamoDBGlobalSecondaryIndexesKey = attribute.Key("aws.dynamodb.global_secondary_indexes") + + // AWSDynamoDBIndexNameKey is the attribute Key conforming to the + // "aws.dynamodb.index_name" semantic conventions. 
It represents the value of + // the `IndexName` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "name_to_group" + AWSDynamoDBIndexNameKey = attribute.Key("aws.dynamodb.index_name") + + // AWSDynamoDBItemCollectionMetricsKey is the attribute Key conforming to the + // "aws.dynamodb.item_collection_metrics" semantic conventions. It represents + // the JSON-serialized value of the `ItemCollectionMetrics` response field. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "string" : [ { "ItemCollectionKey": { "string" : { "B": blob, + // "BOOL": boolean, "BS": [ blob ], "L": [ "AttributeValue" ], "M": { "string" : + // "AttributeValue" }, "N": "string", "NS": [ "string" ], "NULL": boolean, "S": + // "string", "SS": [ "string" ] } }, "SizeEstimateRangeGB": [ number ] } ] }" + AWSDynamoDBItemCollectionMetricsKey = attribute.Key("aws.dynamodb.item_collection_metrics") + + // AWSDynamoDBLimitKey is the attribute Key conforming to the + // "aws.dynamodb.limit" semantic conventions. It represents the value of the + // `Limit` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBLimitKey = attribute.Key("aws.dynamodb.limit") + + // AWSDynamoDBLocalSecondaryIndexesKey is the attribute Key conforming to the + // "aws.dynamodb.local_secondary_indexes" semantic conventions. It represents + // the JSON-serialized value of each item of the `LocalSecondaryIndexes` request + // field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "IndexArn": "string", "IndexName": "string", "IndexSizeBytes": + // number, "ItemCount": number, "KeySchema": [ { "AttributeName": "string", + // "KeyType": "string" } ], "Projection": { "NonKeyAttributes": [ "string" ], + // "ProjectionType": "string" } }" + AWSDynamoDBLocalSecondaryIndexesKey = attribute.Key("aws.dynamodb.local_secondary_indexes") + + // AWSDynamoDBProjectionKey is the attribute Key conforming to the + // "aws.dynamodb.projection" semantic conventions. It represents the value of + // the `ProjectionExpression` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Title", "Title, Price, Color", "Title, Description, RelatedItems, + // ProductReviews" + AWSDynamoDBProjectionKey = attribute.Key("aws.dynamodb.projection") + + // AWSDynamoDBProvisionedReadCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.provisioned_read_capacity" semantic conventions. It represents + // the value of the `ProvisionedThroughput.ReadCapacityUnits` request parameter. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0, 2.0 + AWSDynamoDBProvisionedReadCapacityKey = attribute.Key("aws.dynamodb.provisioned_read_capacity") + + // AWSDynamoDBProvisionedWriteCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.provisioned_write_capacity" semantic conventions. It represents + // the value of the `ProvisionedThroughput.WriteCapacityUnits` request + // parameter. 
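+ // (Illustrative only, not generated text: a table provisioned with 2 write + // capacity units would be recorded as + // semconv.AWSDynamoDBProvisionedWriteCapacity(2.0).)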
+ // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0, 2.0 + AWSDynamoDBProvisionedWriteCapacityKey = attribute.Key("aws.dynamodb.provisioned_write_capacity") + + // AWSDynamoDBScanForwardKey is the attribute Key conforming to the + // "aws.dynamodb.scan_forward" semantic conventions. It represents the value of + // the `ScanIndexForward` request parameter. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AWSDynamoDBScanForwardKey = attribute.Key("aws.dynamodb.scan_forward") + + // AWSDynamoDBScannedCountKey is the attribute Key conforming to the + // "aws.dynamodb.scanned_count" semantic conventions. It represents the value of + // the `ScannedCount` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 50 + AWSDynamoDBScannedCountKey = attribute.Key("aws.dynamodb.scanned_count") + + // AWSDynamoDBSegmentKey is the attribute Key conforming to the + // "aws.dynamodb.segment" semantic conventions. It represents the value of the + // `Segment` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBSegmentKey = attribute.Key("aws.dynamodb.segment") + + // AWSDynamoDBSelectKey is the attribute Key conforming to the + // "aws.dynamodb.select" semantic conventions. It represents the value of the + // `Select` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ALL_ATTRIBUTES", "COUNT" + AWSDynamoDBSelectKey = attribute.Key("aws.dynamodb.select") + + // AWSDynamoDBTableCountKey is the attribute Key conforming to the + // "aws.dynamodb.table_count" semantic conventions. It represents the number of + // items in the `TableNames` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 20 + AWSDynamoDBTableCountKey = attribute.Key("aws.dynamodb.table_count") + + // AWSDynamoDBTableNamesKey is the attribute Key conforming to the + // "aws.dynamodb.table_names" semantic conventions. It represents the keys in + // the `RequestItems` object field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Users", "Cats" + AWSDynamoDBTableNamesKey = attribute.Key("aws.dynamodb.table_names") + + // AWSDynamoDBTotalSegmentsKey is the attribute Key conforming to the + // "aws.dynamodb.total_segments" semantic conventions. It represents the value + // of the `TotalSegments` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 100 + AWSDynamoDBTotalSegmentsKey = attribute.Key("aws.dynamodb.total_segments") + + // AWSECSClusterARNKey is the attribute Key conforming to the + // "aws.ecs.cluster.arn" semantic conventions. It represents the ARN of an + // [ECS cluster]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:ecs:us-west-2:123456789123:cluster/my-cluster" + // + // [ECS cluster]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html + AWSECSClusterARNKey = attribute.Key("aws.ecs.cluster.arn") + + // AWSECSContainerARNKey is the attribute Key conforming to the + // "aws.ecs.container.arn" semantic conventions. It represents the Amazon + // Resource Name (ARN) of an [ECS container instance]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:ecs:us-west-1:123456789123:container/32624152-9086-4f0e-acae-1a75b14fe4d9" + // + // [ECS container instance]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html + AWSECSContainerARNKey = attribute.Key("aws.ecs.container.arn") + + // AWSECSLaunchtypeKey is the attribute Key conforming to the + // "aws.ecs.launchtype" semantic conventions. It represents the [launch type] + // for an ECS task. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [launch type]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/launch_types.html + AWSECSLaunchtypeKey = attribute.Key("aws.ecs.launchtype") + + // AWSECSTaskARNKey is the attribute Key conforming to the "aws.ecs.task.arn" + // semantic conventions. It represents the ARN of a running [ECS task]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:ecs:us-west-1:123456789123:task/10838bed-421f-43ef-870a-f43feacbbb5b", + // "arn:aws:ecs:us-west-1:123456789123:task/my-cluster/task-id/23ebb8ac-c18f-46c6-8bbe-d55d0e37cfbd" + // + // [ECS task]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html#ecs-resource-ids + AWSECSTaskARNKey = attribute.Key("aws.ecs.task.arn") + + // AWSECSTaskFamilyKey is the attribute Key conforming to the + // "aws.ecs.task.family" semantic conventions. It represents the family name of + // the [ECS task definition] used to create the ECS task. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-family" + // + // [ECS task definition]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html + AWSECSTaskFamilyKey = attribute.Key("aws.ecs.task.family") + + // AWSECSTaskIDKey is the attribute Key conforming to the "aws.ecs.task.id" + // semantic conventions. It represents the ID of a running ECS task. The ID MUST + // be extracted from `task.arn`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "10838bed-421f-43ef-870a-f43feacbbb5b", + // "23ebb8ac-c18f-46c6-8bbe-d55d0e37cfbd" + AWSECSTaskIDKey = attribute.Key("aws.ecs.task.id") + + // AWSECSTaskRevisionKey is the attribute Key conforming to the + // "aws.ecs.task.revision" semantic conventions. It represents the revision for + // the task definition used to create the ECS task. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "8", "26" + AWSECSTaskRevisionKey = attribute.Key("aws.ecs.task.revision") + + // AWSEKSClusterARNKey is the attribute Key conforming to the + // "aws.eks.cluster.arn" semantic conventions. It represents the ARN of an EKS + // cluster. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:ecs:us-west-2:123456789123:cluster/my-cluster" + AWSEKSClusterARNKey = attribute.Key("aws.eks.cluster.arn") + + // AWSExtendedRequestIDKey is the attribute Key conforming to the + // "aws.extended_request_id" semantic conventions. It represents the AWS + // extended request ID as returned in the response header `x-amz-id-2`. 
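+ // (A minimal illustrative sketch, not generated text, assuming an + // *http.Response named resp from an S3 call: + // semconv.AWSExtendedRequestID(resp.Header.Get("x-amz-id-2")).)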
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "wzHcyEWfmOGDIE5QOhTAqFDoDWP3y8IUvpNINCwL9N4TEHbUw0/gZJ+VZTmCNCWR7fezEN3eCiQ=" + AWSExtendedRequestIDKey = attribute.Key("aws.extended_request_id") + + // AWSKinesisStreamNameKey is the attribute Key conforming to the + // "aws.kinesis.stream_name" semantic conventions. It represents the name of the + // AWS Kinesis [stream] the request refers to. Corresponds to the + // `--stream-name` parameter of the Kinesis [describe-stream] operation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "some-stream-name" + // + // [stream]: https://docs.aws.amazon.com/streams/latest/dev/introduction.html + // [describe-stream]: https://docs.aws.amazon.com/cli/latest/reference/kinesis/describe-stream.html + AWSKinesisStreamNameKey = attribute.Key("aws.kinesis.stream_name") + + // AWSLambdaInvokedARNKey is the attribute Key conforming to the + // "aws.lambda.invoked_arn" semantic conventions. It represents the full invoked + // ARN as provided on the `Context` passed to the function ( + // `Lambda-Runtime-Invoked-Function-Arn` header on the + // `/runtime/invocation/next` request, where applicable). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:lambda:us-east-1:123456:function:myfunction:myalias" + // Note: This may be different from `cloud.resource_id` if an alias is involved. + AWSLambdaInvokedARNKey = attribute.Key("aws.lambda.invoked_arn") + + // AWSLambdaResourceMappingIDKey is the attribute Key conforming to the + // "aws.lambda.resource_mapping.id" semantic conventions. It represents the UUID + // of the [AWS Lambda EventSource Mapping]. An event source is mapped to a Lambda + // function. Its contents are read by Lambda and used to trigger a function. + // This isn't available in the Lambda execution context or the Lambda runtime + // environment. This is going to be populated by the AWS SDK for each language + // when that UUID is present. Some of these operations are + // Create/Delete/Get/List/Update EventSourceMapping. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "587ad24b-03b9-4413-8202-bbd56b36e5b7" + // + // [AWS Lambda EventSource Mapping]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-eventsourcemapping.html + AWSLambdaResourceMappingIDKey = attribute.Key("aws.lambda.resource_mapping.id") + + // AWSLogGroupARNsKey is the attribute Key conforming to the + // "aws.log.group.arns" semantic conventions. It represents the Amazon Resource + // Name(s) (ARN) of the AWS log group(s). + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:logs:us-west-1:123456789012:log-group:/aws/my/group:*" + // Note: See the [log group ARN format documentation]. + // + // [log group ARN format documentation]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format + AWSLogGroupARNsKey = attribute.Key("aws.log.group.arns") + + // AWSLogGroupNamesKey is the attribute Key conforming to the + // "aws.log.group.names" semantic conventions. It represents the name(s) of the + // AWS log group(s) an application is writing to.
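+ // (Illustrative only, not generated text: + // semconv.AWSLogGroupNames("/aws/lambda/my-function", "opentelemetry-service") + // for an application writing to two log groups.)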
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/aws/lambda/my-function", "opentelemetry-service" + // Note: Multiple log groups must be supported for cases like multi-container + // applications, where a single application has sidecar containers, each + // writing to its own log group. + AWSLogGroupNamesKey = attribute.Key("aws.log.group.names") + + // AWSLogStreamARNsKey is the attribute Key conforming to the + // "aws.log.stream.arns" semantic conventions. It represents the ARN(s) of the + // AWS log stream(s). + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:logs:us-west-1:123456789012:log-group:/aws/my/group:log-stream:logs/main/10838bed-421f-43ef-870a-f43feacbbb5b" + // Note: See the [log stream ARN format documentation]. One log group can + // contain several log streams, so these ARNs necessarily identify both a log + // group and a log stream. + // + // [log stream ARN format documentation]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format + AWSLogStreamARNsKey = attribute.Key("aws.log.stream.arns") + + // AWSLogStreamNamesKey is the attribute Key conforming to the + // "aws.log.stream.names" semantic conventions. It represents the name(s) of the + // AWS log stream(s) an application is writing to. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "logs/main/10838bed-421f-43ef-870a-f43feacbbb5b" + AWSLogStreamNamesKey = attribute.Key("aws.log.stream.names") + + // AWSRequestIDKey is the attribute Key conforming to the "aws.request_id" + // semantic conventions. It represents the AWS request ID as returned in the + // response headers `x-amzn-requestid`, `x-amzn-request-id` or + // `x-amz-request-id`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "79b9da39-b7ae-508a-a6bc-864b2829c622", "C9ER4AJX75574TDJ" + AWSRequestIDKey = attribute.Key("aws.request_id") + + // AWSS3BucketKey is the attribute Key conforming to the "aws.s3.bucket" + // semantic conventions. It represents the S3 bucket name the request refers to. + // Corresponds to the `--bucket` parameter of the [S3 API] operations. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "some-bucket-name" + // Note: The `bucket` attribute is applicable to all S3 operations that + // reference a bucket, i.e. that require the bucket name as a mandatory + // parameter. + // This applies to almost all S3 operations except `list-buckets`. + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + AWSS3BucketKey = attribute.Key("aws.s3.bucket") + + // AWSS3CopySourceKey is the attribute Key conforming to the + // "aws.s3.copy_source" semantic conventions. It represents the source object + // (in the form `bucket`/`key`) for the copy operation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "someFile.yml" + // Note: The `copy_source` attribute applies to S3 copy operations and + // corresponds to the `--copy-source` parameter + // of the [copy-object operation within the S3 API].
+ // This applies in particular to the following operations: + // + // - [copy-object] + // - [upload-part-copy] + // + // + // [copy-object operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [copy-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3CopySourceKey = attribute.Key("aws.s3.copy_source") + + // AWSS3DeleteKey is the attribute Key conforming to the "aws.s3.delete" + // semantic conventions. It represents the delete request container that + // specifies the objects to be deleted. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "Objects=[{Key=string,VersionId=string},{Key=string,VersionId=string}],Quiet=boolean" + // Note: The `delete` attribute is only applicable to the [delete-object] + // operation. + // The `delete` attribute corresponds to the `--delete` parameter of the + // [delete-objects operation within the S3 API]. + // + // [delete-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-object.html + // [delete-objects operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-objects.html + AWSS3DeleteKey = attribute.Key("aws.s3.delete") + + // AWSS3KeyKey is the attribute Key conforming to the "aws.s3.key" semantic + // conventions. It represents the S3 object key the request refers to. + // Corresponds to the `--key` parameter of the [S3 API] operations. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "someFile.yml" + // Note: The `key` attribute is applicable to all object-related S3 operations, + // i.e. that require the object key as a mandatory parameter. 
+ // This applies in particular to the following operations: + // + // - [copy-object] + // - [delete-object] + // - [get-object] + // - [head-object] + // - [put-object] + // - [restore-object] + // - [select-object-content] + // - [abort-multipart-upload] + // - [complete-multipart-upload] + // - [create-multipart-upload] + // - [list-parts] + // - [upload-part] + // - [upload-part-copy] + // + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + // [copy-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [delete-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-object.html + // [get-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/get-object.html + // [head-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/head-object.html + // [put-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/put-object.html + // [restore-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/restore-object.html + // [select-object-content]: https://docs.aws.amazon.com/cli/latest/reference/s3api/select-object-content.html + // [abort-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/abort-multipart-upload.html + // [complete-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/complete-multipart-upload.html + // [create-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/create-multipart-upload.html + // [list-parts]: https://docs.aws.amazon.com/cli/latest/reference/s3api/list-parts.html + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3KeyKey = attribute.Key("aws.s3.key") + + // AWSS3PartNumberKey is the attribute Key conforming to the + // "aws.s3.part_number" semantic conventions. It represents the part number of + // the part being uploaded in a multipart-upload operation. This is a positive + // integer between 1 and 10,000. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 3456 + // Note: The `part_number` attribute is only applicable to the [upload-part] + // and [upload-part-copy] operations. + // The `part_number` attribute corresponds to the `--part-number` parameter of + // the + // [upload-part operation within the S3 API]. + // + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + // [upload-part operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + AWSS3PartNumberKey = attribute.Key("aws.s3.part_number") + + // AWSS3UploadIDKey is the attribute Key conforming to the "aws.s3.upload_id" + // semantic conventions. It represents the upload ID that identifies the + // multipart upload. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "dfRtDYWFbkRONycy.Yxwh66Yjlx.cph0gtNBtJ" + // Note: The `upload_id` attribute applies to S3 multipart-upload operations and + // corresponds to the `--upload-id` parameter + // of the [S3 API] multipart operations. 
+ // This applies in particular to the following operations: + // + // - [abort-multipart-upload] + // - [complete-multipart-upload] + // - [list-parts] + // - [upload-part] + // - [upload-part-copy] + // + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + // [abort-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/abort-multipart-upload.html + // [complete-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/complete-multipart-upload.html + // [list-parts]: https://docs.aws.amazon.com/cli/latest/reference/s3api/list-parts.html + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3UploadIDKey = attribute.Key("aws.s3.upload_id") + + // AWSSecretsmanagerSecretARNKey is the attribute Key conforming to the + // "aws.secretsmanager.secret.arn" semantic conventions. It represents the ARN + // of the Secret stored in the Secrets Manager. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:secretsmanager:us-east-1:123456789012:secret:SecretName-6RandomCharacters" + AWSSecretsmanagerSecretARNKey = attribute.Key("aws.secretsmanager.secret.arn") + + // AWSSNSTopicARNKey is the attribute Key conforming to the "aws.sns.topic.arn" + // semantic conventions. It represents the ARN of the AWS SNS Topic. An Amazon + // SNS [topic] is a logical access point that acts as a communication channel. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:sns:us-east-1:123456789012:mystack-mytopic-NZJ5JSMVGFIE" + // + // [topic]: https://docs.aws.amazon.com/sns/latest/dg/sns-create-topic.html + AWSSNSTopicARNKey = attribute.Key("aws.sns.topic.arn") + + // AWSSQSQueueURLKey is the attribute Key conforming to the "aws.sqs.queue.url" + // semantic conventions. It represents the URL of the AWS SQS Queue. It's a + // unique identifier for a queue in Amazon Simple Queue Service (SQS) and is + // used to access the queue and perform actions on it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://sqs.us-east-1.amazonaws.com/123456789012/MyQueue" + AWSSQSQueueURLKey = attribute.Key("aws.sqs.queue.url") + + // AWSStepFunctionsActivityARNKey is the attribute Key conforming to the + // "aws.step_functions.activity.arn" semantic conventions. It represents the ARN + // of the AWS Step Functions Activity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:states:us-east-1:123456789012:activity:get-greeting" + AWSStepFunctionsActivityARNKey = attribute.Key("aws.step_functions.activity.arn") + + // AWSStepFunctionsStateMachineARNKey is the attribute Key conforming to the + // "aws.step_functions.state_machine.arn" semantic conventions. It represents + // the ARN of the AWS Step Functions State Machine. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:states:us-east-1:123456789012:stateMachine:myStateMachine:1" + AWSStepFunctionsStateMachineARNKey = attribute.Key("aws.step_functions.state_machine.arn") +) + +// AWSBedrockGuardrailID returns an attribute KeyValue conforming to the +// "aws.bedrock.guardrail.id" semantic conventions.
It represents the unique +// identifier of the AWS Bedrock Guardrail. A [guardrail] helps safeguard and +// prevent unwanted behavior from model responses or user messages. +// +// [guardrail]: https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails.html +func AWSBedrockGuardrailID(val string) attribute.KeyValue { + return AWSBedrockGuardrailIDKey.String(val) +} + +// AWSBedrockKnowledgeBaseID returns an attribute KeyValue conforming to the +// "aws.bedrock.knowledge_base.id" semantic conventions. It represents the unique +// identifier of the AWS Bedrock Knowledge base. A [knowledge base] is a bank of +// information that can be queried by models to generate more relevant responses +// and augment prompts. +// +// [knowledge base]: https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html +func AWSBedrockKnowledgeBaseID(val string) attribute.KeyValue { + return AWSBedrockKnowledgeBaseIDKey.String(val) +} + +// AWSDynamoDBAttributeDefinitions returns an attribute KeyValue conforming to +// the "aws.dynamodb.attribute_definitions" semantic conventions. It represents +// the JSON-serialized value of each item in the `AttributeDefinitions` request +// field. +func AWSDynamoDBAttributeDefinitions(val ...string) attribute.KeyValue { + return AWSDynamoDBAttributeDefinitionsKey.StringSlice(val) +} + +// AWSDynamoDBAttributesToGet returns an attribute KeyValue conforming to the +// "aws.dynamodb.attributes_to_get" semantic conventions. It represents the value +// of the `AttributesToGet` request parameter. +func AWSDynamoDBAttributesToGet(val ...string) attribute.KeyValue { + return AWSDynamoDBAttributesToGetKey.StringSlice(val) +} + +// AWSDynamoDBConsistentRead returns an attribute KeyValue conforming to the +// "aws.dynamodb.consistent_read" semantic conventions. It represents the value +// of the `ConsistentRead` request parameter. +func AWSDynamoDBConsistentRead(val bool) attribute.KeyValue { + return AWSDynamoDBConsistentReadKey.Bool(val) +} + +// AWSDynamoDBConsumedCapacity returns an attribute KeyValue conforming to the +// "aws.dynamodb.consumed_capacity" semantic conventions. It represents the +// JSON-serialized value of each item in the `ConsumedCapacity` response field. +func AWSDynamoDBConsumedCapacity(val ...string) attribute.KeyValue { + return AWSDynamoDBConsumedCapacityKey.StringSlice(val) +} + +// AWSDynamoDBCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.count" semantic conventions. It represents the value of the +// `Count` response parameter. +func AWSDynamoDBCount(val int) attribute.KeyValue { + return AWSDynamoDBCountKey.Int(val) +} + +// AWSDynamoDBExclusiveStartTable returns an attribute KeyValue conforming to the +// "aws.dynamodb.exclusive_start_table" semantic conventions. It represents the +// value of the `ExclusiveStartTableName` request parameter. +func AWSDynamoDBExclusiveStartTable(val string) attribute.KeyValue { + return AWSDynamoDBExclusiveStartTableKey.String(val) +} + +// AWSDynamoDBGlobalSecondaryIndexUpdates returns an attribute KeyValue +// conforming to the "aws.dynamodb.global_secondary_index_updates" semantic +// conventions. It represents the JSON-serialized value of each item in the +// `GlobalSecondaryIndexUpdates` request field. 
+func AWSDynamoDBGlobalSecondaryIndexUpdates(val ...string) attribute.KeyValue { + return AWSDynamoDBGlobalSecondaryIndexUpdatesKey.StringSlice(val) +} + +// AWSDynamoDBGlobalSecondaryIndexes returns an attribute KeyValue conforming to +// the "aws.dynamodb.global_secondary_indexes" semantic conventions. It +// represents the JSON-serialized value of each item of the +// `GlobalSecondaryIndexes` request field. +func AWSDynamoDBGlobalSecondaryIndexes(val ...string) attribute.KeyValue { + return AWSDynamoDBGlobalSecondaryIndexesKey.StringSlice(val) +} + +// AWSDynamoDBIndexName returns an attribute KeyValue conforming to the +// "aws.dynamodb.index_name" semantic conventions. It represents the value of the +// `IndexName` request parameter. +func AWSDynamoDBIndexName(val string) attribute.KeyValue { + return AWSDynamoDBIndexNameKey.String(val) +} + +// AWSDynamoDBItemCollectionMetrics returns an attribute KeyValue conforming to +// the "aws.dynamodb.item_collection_metrics" semantic conventions. It represents +// the JSON-serialized value of the `ItemCollectionMetrics` response field. +func AWSDynamoDBItemCollectionMetrics(val string) attribute.KeyValue { + return AWSDynamoDBItemCollectionMetricsKey.String(val) +} + +// AWSDynamoDBLimit returns an attribute KeyValue conforming to the +// "aws.dynamodb.limit" semantic conventions. It represents the value of the +// `Limit` request parameter. +func AWSDynamoDBLimit(val int) attribute.KeyValue { + return AWSDynamoDBLimitKey.Int(val) +} + +// AWSDynamoDBLocalSecondaryIndexes returns an attribute KeyValue conforming to +// the "aws.dynamodb.local_secondary_indexes" semantic conventions. It represents +// the JSON-serialized value of each item of the `LocalSecondaryIndexes` request +// field. +func AWSDynamoDBLocalSecondaryIndexes(val ...string) attribute.KeyValue { + return AWSDynamoDBLocalSecondaryIndexesKey.StringSlice(val) +} + +// AWSDynamoDBProjection returns an attribute KeyValue conforming to the +// "aws.dynamodb.projection" semantic conventions. It represents the value of the +// `ProjectionExpression` request parameter. +func AWSDynamoDBProjection(val string) attribute.KeyValue { + return AWSDynamoDBProjectionKey.String(val) +} + +// AWSDynamoDBProvisionedReadCapacity returns an attribute KeyValue conforming to +// the "aws.dynamodb.provisioned_read_capacity" semantic conventions. It +// represents the value of the `ProvisionedThroughput.ReadCapacityUnits` request +// parameter. +func AWSDynamoDBProvisionedReadCapacity(val float64) attribute.KeyValue { + return AWSDynamoDBProvisionedReadCapacityKey.Float64(val) +} + +// AWSDynamoDBProvisionedWriteCapacity returns an attribute KeyValue conforming +// to the "aws.dynamodb.provisioned_write_capacity" semantic conventions. It +// represents the value of the `ProvisionedThroughput.WriteCapacityUnits` request +// parameter. +func AWSDynamoDBProvisionedWriteCapacity(val float64) attribute.KeyValue { + return AWSDynamoDBProvisionedWriteCapacityKey.Float64(val) +} + +// AWSDynamoDBScanForward returns an attribute KeyValue conforming to the +// "aws.dynamodb.scan_forward" semantic conventions. It represents the value of +// the `ScanIndexForward` request parameter. +func AWSDynamoDBScanForward(val bool) attribute.KeyValue { + return AWSDynamoDBScanForwardKey.Bool(val) +} + +// AWSDynamoDBScannedCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.scanned_count" semantic conventions. It represents the value of +// the `ScannedCount` response parameter. 
+func AWSDynamoDBScannedCount(val int) attribute.KeyValue { + return AWSDynamoDBScannedCountKey.Int(val) +} + +// AWSDynamoDBSegment returns an attribute KeyValue conforming to the +// "aws.dynamodb.segment" semantic conventions. It represents the value of the +// `Segment` request parameter. +func AWSDynamoDBSegment(val int) attribute.KeyValue { + return AWSDynamoDBSegmentKey.Int(val) +} + +// AWSDynamoDBSelect returns an attribute KeyValue conforming to the +// "aws.dynamodb.select" semantic conventions. It represents the value of the +// `Select` request parameter. +func AWSDynamoDBSelect(val string) attribute.KeyValue { + return AWSDynamoDBSelectKey.String(val) +} + +// AWSDynamoDBTableCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.table_count" semantic conventions. It represents the number of +// items in the `TableNames` response parameter. +func AWSDynamoDBTableCount(val int) attribute.KeyValue { + return AWSDynamoDBTableCountKey.Int(val) +} + +// AWSDynamoDBTableNames returns an attribute KeyValue conforming to the +// "aws.dynamodb.table_names" semantic conventions. It represents the keys in the +// `RequestItems` object field. +func AWSDynamoDBTableNames(val ...string) attribute.KeyValue { + return AWSDynamoDBTableNamesKey.StringSlice(val) +} + +// AWSDynamoDBTotalSegments returns an attribute KeyValue conforming to the +// "aws.dynamodb.total_segments" semantic conventions. It represents the value of +// the `TotalSegments` request parameter. +func AWSDynamoDBTotalSegments(val int) attribute.KeyValue { + return AWSDynamoDBTotalSegmentsKey.Int(val) +} + +// AWSECSClusterARN returns an attribute KeyValue conforming to the +// "aws.ecs.cluster.arn" semantic conventions. It represents the ARN of an +// [ECS cluster]. +// +// [ECS cluster]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html +func AWSECSClusterARN(val string) attribute.KeyValue { + return AWSECSClusterARNKey.String(val) +} + +// AWSECSContainerARN returns an attribute KeyValue conforming to the +// "aws.ecs.container.arn" semantic conventions. It represents the Amazon +// Resource Name (ARN) of an [ECS container instance]. +// +// [ECS container instance]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html +func AWSECSContainerARN(val string) attribute.KeyValue { + return AWSECSContainerARNKey.String(val) +} + +// AWSECSTaskARN returns an attribute KeyValue conforming to the +// "aws.ecs.task.arn" semantic conventions. It represents the ARN of a running +// [ECS task]. +// +// [ECS task]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html#ecs-resource-ids +func AWSECSTaskARN(val string) attribute.KeyValue { + return AWSECSTaskARNKey.String(val) +} + +// AWSECSTaskFamily returns an attribute KeyValue conforming to the +// "aws.ecs.task.family" semantic conventions. It represents the family name of +// the [ECS task definition] used to create the ECS task. +// +// [ECS task definition]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html +func AWSECSTaskFamily(val string) attribute.KeyValue { + return AWSECSTaskFamilyKey.String(val) +} + +// AWSECSTaskID returns an attribute KeyValue conforming to the "aws.ecs.task.id" +// semantic conventions. It represents the ID of a running ECS task. The ID MUST +// be extracted from `task.arn`. 
+func AWSECSTaskID(val string) attribute.KeyValue { + return AWSECSTaskIDKey.String(val) +} + +// AWSECSTaskRevision returns an attribute KeyValue conforming to the +// "aws.ecs.task.revision" semantic conventions. It represents the revision for +// the task definition used to create the ECS task. +func AWSECSTaskRevision(val string) attribute.KeyValue { + return AWSECSTaskRevisionKey.String(val) +} + +// AWSEKSClusterARN returns an attribute KeyValue conforming to the +// "aws.eks.cluster.arn" semantic conventions. It represents the ARN of an EKS +// cluster. +func AWSEKSClusterARN(val string) attribute.KeyValue { + return AWSEKSClusterARNKey.String(val) +} + +// AWSExtendedRequestID returns an attribute KeyValue conforming to the +// "aws.extended_request_id" semantic conventions. It represents the AWS extended +// request ID as returned in the response header `x-amz-id-2`. +func AWSExtendedRequestID(val string) attribute.KeyValue { + return AWSExtendedRequestIDKey.String(val) +} + +// AWSKinesisStreamName returns an attribute KeyValue conforming to the +// "aws.kinesis.stream_name" semantic conventions. It represents the name of the +// AWS Kinesis [stream] the request refers to. Corresponds to the `--stream-name` +// parameter of the Kinesis [describe-stream] operation. +// +// [stream]: https://docs.aws.amazon.com/streams/latest/dev/introduction.html +// [describe-stream]: https://docs.aws.amazon.com/cli/latest/reference/kinesis/describe-stream.html +func AWSKinesisStreamName(val string) attribute.KeyValue { + return AWSKinesisStreamNameKey.String(val) +} + +// AWSLambdaInvokedARN returns an attribute KeyValue conforming to the +// "aws.lambda.invoked_arn" semantic conventions. It represents the full invoked +// ARN as provided on the `Context` passed to the function ( +// `Lambda-Runtime-Invoked-Function-Arn` header on the `/runtime/invocation/next` +// request, where applicable). +func AWSLambdaInvokedARN(val string) attribute.KeyValue { + return AWSLambdaInvokedARNKey.String(val) +} + +// AWSLambdaResourceMappingID returns an attribute KeyValue conforming to the +// "aws.lambda.resource_mapping.id" semantic conventions. It represents the UUID +// of the [AWS Lambda EventSource Mapping]. An event source is mapped to a Lambda +// function. Its contents are read by Lambda and used to trigger a function. +// This isn't available in the Lambda execution context or the Lambda runtime +// environment. This is going to be populated by the AWS SDK for each language +// when that UUID is present. Some of these operations are +// Create/Delete/Get/List/Update EventSourceMapping. +// +// [AWS Lambda EventSource Mapping]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-eventsourcemapping.html +func AWSLambdaResourceMappingID(val string) attribute.KeyValue { + return AWSLambdaResourceMappingIDKey.String(val) +} + +// AWSLogGroupARNs returns an attribute KeyValue conforming to the +// "aws.log.group.arns" semantic conventions. It represents the Amazon Resource +// Name(s) (ARN) of the AWS log group(s). +func AWSLogGroupARNs(val ...string) attribute.KeyValue { + return AWSLogGroupARNsKey.StringSlice(val) +} + +// AWSLogGroupNames returns an attribute KeyValue conforming to the +// "aws.log.group.names" semantic conventions. It represents the name(s) of the +// AWS log group(s) an application is writing to.
+func AWSLogGroupNames(val ...string) attribute.KeyValue {
+	return AWSLogGroupNamesKey.StringSlice(val)
+}
+
+// AWSLogStreamARNs returns an attribute KeyValue conforming to the
+// "aws.log.stream.arns" semantic conventions. It represents the ARN(s) of the
+// AWS log stream(s).
+func AWSLogStreamARNs(val ...string) attribute.KeyValue {
+	return AWSLogStreamARNsKey.StringSlice(val)
+}
+
+// AWSLogStreamNames returns an attribute KeyValue conforming to the
+// "aws.log.stream.names" semantic conventions. It represents the name(s) of the
+// AWS log stream(s) an application is writing to.
+func AWSLogStreamNames(val ...string) attribute.KeyValue {
+	return AWSLogStreamNamesKey.StringSlice(val)
+}
+
+// AWSRequestID returns an attribute KeyValue conforming to the "aws.request_id"
+// semantic conventions. It represents the AWS request ID as returned in the
+// response headers `x-amzn-requestid`, `x-amzn-request-id` or `x-amz-request-id`.
+func AWSRequestID(val string) attribute.KeyValue {
+	return AWSRequestIDKey.String(val)
+}
+
+// AWSS3Bucket returns an attribute KeyValue conforming to the "aws.s3.bucket"
+// semantic conventions. It represents the S3 bucket name the request refers to.
+// Corresponds to the `--bucket` parameter of the [S3 API] operations.
+//
+// [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html
+func AWSS3Bucket(val string) attribute.KeyValue {
+	return AWSS3BucketKey.String(val)
+}
+
+// AWSS3CopySource returns an attribute KeyValue conforming to the
+// "aws.s3.copy_source" semantic conventions. It represents the source object (in
+// the form `bucket`/`key`) for the copy operation.
+func AWSS3CopySource(val string) attribute.KeyValue {
+	return AWSS3CopySourceKey.String(val)
+}
+
+// AWSS3Delete returns an attribute KeyValue conforming to the "aws.s3.delete"
+// semantic conventions. It represents the delete request container that
+// specifies the objects to be deleted.
+func AWSS3Delete(val string) attribute.KeyValue {
+	return AWSS3DeleteKey.String(val)
+}
+
+// AWSS3Key returns an attribute KeyValue conforming to the "aws.s3.key" semantic
+// conventions. It represents the S3 object key the request refers to.
+// Corresponds to the `--key` parameter of the [S3 API] operations.
+//
+// [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html
+func AWSS3Key(val string) attribute.KeyValue {
+	return AWSS3KeyKey.String(val)
+}
+
+// AWSS3PartNumber returns an attribute KeyValue conforming to the
+// "aws.s3.part_number" semantic conventions. It represents the part number of
+// the part being uploaded in a multipart-upload operation. This is a positive
+// integer between 1 and 10,000.
+func AWSS3PartNumber(val int) attribute.KeyValue {
+	return AWSS3PartNumberKey.Int(val)
+}
+
+// AWSS3UploadID returns an attribute KeyValue conforming to the
+// "aws.s3.upload_id" semantic conventions. It represents the upload ID that
+// identifies the multipart upload.
+func AWSS3UploadID(val string) attribute.KeyValue {
+	return AWSS3UploadIDKey.String(val)
+}
+
+// AWSSecretsmanagerSecretARN returns an attribute KeyValue conforming to the
+// "aws.secretsmanager.secret.arn" semantic conventions. It represents the ARN of
+// the Secret stored in the Secrets Manager.
+func AWSSecretsmanagerSecretARN(val string) attribute.KeyValue {
+	return AWSSecretsmanagerSecretARNKey.String(val)
+}
+
+// AWSSNSTopicARN returns an attribute KeyValue conforming to the
+// "aws.sns.topic.arn" semantic conventions.
It represents the ARN of the AWS SNS +// Topic. An Amazon SNS [topic] is a logical access point that acts as a +// communication channel. +// +// [topic]: https://docs.aws.amazon.com/sns/latest/dg/sns-create-topic.html +func AWSSNSTopicARN(val string) attribute.KeyValue { + return AWSSNSTopicARNKey.String(val) +} + +// AWSSQSQueueURL returns an attribute KeyValue conforming to the +// "aws.sqs.queue.url" semantic conventions. It represents the URL of the AWS SQS +// Queue. It's a unique identifier for a queue in Amazon Simple Queue Service +// (SQS) and is used to access the queue and perform actions on it. +func AWSSQSQueueURL(val string) attribute.KeyValue { + return AWSSQSQueueURLKey.String(val) +} + +// AWSStepFunctionsActivityARN returns an attribute KeyValue conforming to the +// "aws.step_functions.activity.arn" semantic conventions. It represents the ARN +// of the AWS Step Functions Activity. +func AWSStepFunctionsActivityARN(val string) attribute.KeyValue { + return AWSStepFunctionsActivityARNKey.String(val) +} + +// AWSStepFunctionsStateMachineARN returns an attribute KeyValue conforming to +// the "aws.step_functions.state_machine.arn" semantic conventions. It represents +// the ARN of the AWS Step Functions State Machine. +func AWSStepFunctionsStateMachineARN(val string) attribute.KeyValue { + return AWSStepFunctionsStateMachineARNKey.String(val) +} + +// Enum values for aws.ecs.launchtype +var ( + // ec2 + // Stability: development + AWSECSLaunchtypeEC2 = AWSECSLaunchtypeKey.String("ec2") + // fargate + // Stability: development + AWSECSLaunchtypeFargate = AWSECSLaunchtypeKey.String("fargate") +) + +// Namespace: az +const ( + // AzNamespaceKey is the attribute Key conforming to the "az.namespace" semantic + // conventions. It represents the [Azure Resource Provider Namespace] as + // recognized by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Microsoft.Storage", "Microsoft.KeyVault", "Microsoft.ServiceBus" + // + // [Azure Resource Provider Namespace]: https://learn.microsoft.com/azure/azure-resource-manager/management/azure-services-resource-providers + AzNamespaceKey = attribute.Key("az.namespace") + + // AzServiceRequestIDKey is the attribute Key conforming to the + // "az.service_request_id" semantic conventions. It represents the unique + // identifier of the service request. It's generated by the Azure service and + // returned with the response. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "00000000-0000-0000-0000-000000000000" + AzServiceRequestIDKey = attribute.Key("az.service_request_id") +) + +// AzNamespace returns an attribute KeyValue conforming to the "az.namespace" +// semantic conventions. It represents the [Azure Resource Provider Namespace] as +// recognized by the client. +// +// [Azure Resource Provider Namespace]: https://learn.microsoft.com/azure/azure-resource-manager/management/azure-services-resource-providers +func AzNamespace(val string) attribute.KeyValue { + return AzNamespaceKey.String(val) +} + +// AzServiceRequestID returns an attribute KeyValue conforming to the +// "az.service_request_id" semantic conventions. It represents the unique +// identifier of the service request. It's generated by the Azure service and +// returned with the response. 
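+//
+// A hedged sketch of recording the ID from an HTTP response (reading the
+// `x-ms-request-id` header is an assumption about the particular Azure
+// service; the `span` and `resp` variables are assumptions too):
+//
+//	if id := resp.Header.Get("x-ms-request-id"); id != "" {
+//		span.SetAttributes(AzServiceRequestID(id))
+//	}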
+func AzServiceRequestID(val string) attribute.KeyValue { + return AzServiceRequestIDKey.String(val) +} + +// Namespace: azure +const ( + // AzureClientIDKey is the attribute Key conforming to the "azure.client.id" + // semantic conventions. It represents the unique identifier of the client + // instance. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "3ba4827d-4422-483f-b59f-85b74211c11d", "storage-client-1" + AzureClientIDKey = attribute.Key("azure.client.id") + + // AzureCosmosDBConnectionModeKey is the attribute Key conforming to the + // "azure.cosmosdb.connection.mode" semantic conventions. It represents the + // cosmos client connection mode. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AzureCosmosDBConnectionModeKey = attribute.Key("azure.cosmosdb.connection.mode") + + // AzureCosmosDBConsistencyLevelKey is the attribute Key conforming to the + // "azure.cosmosdb.consistency.level" semantic conventions. It represents the + // account or request [consistency level]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Eventual", "ConsistentPrefix", "BoundedStaleness", "Strong", + // "Session" + // + // [consistency level]: https://learn.microsoft.com/azure/cosmos-db/consistency-levels + AzureCosmosDBConsistencyLevelKey = attribute.Key("azure.cosmosdb.consistency.level") + + // AzureCosmosDBOperationContactedRegionsKey is the attribute Key conforming to + // the "azure.cosmosdb.operation.contacted_regions" semantic conventions. It + // represents the list of regions contacted during operation in the order that + // they were contacted. If there is more than one region listed, it indicates + // that the operation was performed on multiple regions i.e. cross-regional + // call. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "North Central US", "Australia East", "Australia Southeast" + // Note: Region name matches the format of `displayName` in [Azure Location API] + // + // [Azure Location API]: https://learn.microsoft.com/rest/api/subscription/subscriptions/list-locations?view=rest-subscription-2021-10-01&tabs=HTTP#location + AzureCosmosDBOperationContactedRegionsKey = attribute.Key("azure.cosmosdb.operation.contacted_regions") + + // AzureCosmosDBOperationRequestChargeKey is the attribute Key conforming to the + // "azure.cosmosdb.operation.request_charge" semantic conventions. It represents + // the number of request units consumed by the operation. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 46.18, 1.0 + AzureCosmosDBOperationRequestChargeKey = attribute.Key("azure.cosmosdb.operation.request_charge") + + // AzureCosmosDBRequestBodySizeKey is the attribute Key conforming to the + // "azure.cosmosdb.request.body.size" semantic conventions. It represents the + // request payload size in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AzureCosmosDBRequestBodySizeKey = attribute.Key("azure.cosmosdb.request.body.size") + + // AzureCosmosDBResponseSubStatusCodeKey is the attribute Key conforming to the + // "azure.cosmosdb.response.sub_status_code" semantic conventions. It represents + // the cosmos DB sub status code. 
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1000, 1002 + AzureCosmosDBResponseSubStatusCodeKey = attribute.Key("azure.cosmosdb.response.sub_status_code") +) + +// AzureClientID returns an attribute KeyValue conforming to the +// "azure.client.id" semantic conventions. It represents the unique identifier of +// the client instance. +func AzureClientID(val string) attribute.KeyValue { + return AzureClientIDKey.String(val) +} + +// AzureCosmosDBOperationContactedRegions returns an attribute KeyValue +// conforming to the "azure.cosmosdb.operation.contacted_regions" semantic +// conventions. It represents the list of regions contacted during operation in +// the order that they were contacted. If there is more than one region listed, +// it indicates that the operation was performed on multiple regions i.e. +// cross-regional call. +func AzureCosmosDBOperationContactedRegions(val ...string) attribute.KeyValue { + return AzureCosmosDBOperationContactedRegionsKey.StringSlice(val) +} + +// AzureCosmosDBOperationRequestCharge returns an attribute KeyValue conforming +// to the "azure.cosmosdb.operation.request_charge" semantic conventions. It +// represents the number of request units consumed by the operation. +func AzureCosmosDBOperationRequestCharge(val float64) attribute.KeyValue { + return AzureCosmosDBOperationRequestChargeKey.Float64(val) +} + +// AzureCosmosDBRequestBodySize returns an attribute KeyValue conforming to the +// "azure.cosmosdb.request.body.size" semantic conventions. It represents the +// request payload size in bytes. +func AzureCosmosDBRequestBodySize(val int) attribute.KeyValue { + return AzureCosmosDBRequestBodySizeKey.Int(val) +} + +// AzureCosmosDBResponseSubStatusCode returns an attribute KeyValue conforming to +// the "azure.cosmosdb.response.sub_status_code" semantic conventions. It +// represents the cosmos DB sub status code. +func AzureCosmosDBResponseSubStatusCode(val int) attribute.KeyValue { + return AzureCosmosDBResponseSubStatusCodeKey.Int(val) +} + +// Enum values for azure.cosmosdb.connection.mode +var ( + // Gateway (HTTP) connection. + // Stability: development + AzureCosmosDBConnectionModeGateway = AzureCosmosDBConnectionModeKey.String("gateway") + // Direct connection. + // Stability: development + AzureCosmosDBConnectionModeDirect = AzureCosmosDBConnectionModeKey.String("direct") +) + +// Enum values for azure.cosmosdb.consistency.level +var ( + // strong + // Stability: development + AzureCosmosDBConsistencyLevelStrong = AzureCosmosDBConsistencyLevelKey.String("Strong") + // bounded_staleness + // Stability: development + AzureCosmosDBConsistencyLevelBoundedStaleness = AzureCosmosDBConsistencyLevelKey.String("BoundedStaleness") + // session + // Stability: development + AzureCosmosDBConsistencyLevelSession = AzureCosmosDBConsistencyLevelKey.String("Session") + // eventual + // Stability: development + AzureCosmosDBConsistencyLevelEventual = AzureCosmosDBConsistencyLevelKey.String("Eventual") + // consistent_prefix + // Stability: development + AzureCosmosDBConsistencyLevelConsistentPrefix = AzureCosmosDBConsistencyLevelKey.String("ConsistentPrefix") +) + +// Namespace: browser +const ( + // BrowserBrandsKey is the attribute Key conforming to the "browser.brands" + // semantic conventions. It represents the array of brand name and version + // separated by a space. 
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: " Not A;Brand 99", "Chromium 99", "Chrome 99" + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.brands`). + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + BrowserBrandsKey = attribute.Key("browser.brands") + + // BrowserLanguageKey is the attribute Key conforming to the "browser.language" + // semantic conventions. It represents the preferred language of the user using + // the browser. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "en", "en-US", "fr", "fr-FR" + // Note: This value is intended to be taken from the Navigator API + // `navigator.language`. + BrowserLanguageKey = attribute.Key("browser.language") + + // BrowserMobileKey is the attribute Key conforming to the "browser.mobile" + // semantic conventions. It represents a boolean that is true if the browser is + // running on a mobile device. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.mobile`). If unavailable, this attribute SHOULD be + // left unset. + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + BrowserMobileKey = attribute.Key("browser.mobile") + + // BrowserPlatformKey is the attribute Key conforming to the "browser.platform" + // semantic conventions. It represents the platform on which the browser is + // running. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Windows", "macOS", "Android" + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.platform`). If unavailable, the legacy + // `navigator.platform` API SHOULD NOT be used instead and this attribute SHOULD + // be left unset in order for the values to be consistent. + // The list of possible values is defined in the + // [W3C User-Agent Client Hints specification]. Note that some (but not all) of + // these values can overlap with values in the + // [`os.type` and `os.name` attributes]. However, for consistency, the values in + // the `browser.platform` attribute should capture the exact value that the user + // agent provides. + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + // [W3C User-Agent Client Hints specification]: https://wicg.github.io/ua-client-hints/#sec-ch-ua-platform + // [`os.type` and `os.name` attributes]: ./os.md + BrowserPlatformKey = attribute.Key("browser.platform") +) + +// BrowserBrands returns an attribute KeyValue conforming to the "browser.brands" +// semantic conventions. It represents the array of brand name and version +// separated by a space. +func BrowserBrands(val ...string) attribute.KeyValue { + return BrowserBrandsKey.StringSlice(val) +} + +// BrowserLanguage returns an attribute KeyValue conforming to the +// "browser.language" semantic conventions. It represents the preferred language +// of the user using the browser. +func BrowserLanguage(val string) attribute.KeyValue { + return BrowserLanguageKey.String(val) +} + +// BrowserMobile returns an attribute KeyValue conforming to the "browser.mobile" +// semantic conventions. It represents a boolean that is true if the browser is +// running on a mobile device. 
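+//
+// A hedged sketch (the boolean would typically be relayed from
+// `navigator.userAgentData.mobile` as noted above; the `span` variable is an
+// assumption):
+//
+//	span.SetAttributes(BrowserMobile(true))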
+func BrowserMobile(val bool) attribute.KeyValue {
+	return BrowserMobileKey.Bool(val)
+}
+
+// BrowserPlatform returns an attribute KeyValue conforming to the
+// "browser.platform" semantic conventions. It represents the platform on which
+// the browser is running.
+func BrowserPlatform(val string) attribute.KeyValue {
+	return BrowserPlatformKey.String(val)
+}
+
+// Namespace: cassandra
+const (
+	// CassandraConsistencyLevelKey is the attribute Key conforming to the
+	// "cassandra.consistency.level" semantic conventions. It represents the
+	// consistency level of the query. Based on consistency values from [CQL].
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	//
+	// [CQL]: https://docs.datastax.com/en/cassandra-oss/3.0/cassandra/dml/dmlConfigConsistency.html
+	CassandraConsistencyLevelKey = attribute.Key("cassandra.consistency.level")
+
+	// CassandraCoordinatorDCKey is the attribute Key conforming to the
+	// "cassandra.coordinator.dc" semantic conventions. It represents the data
+	// center of the coordinating node for a query.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: us-west-2
+	CassandraCoordinatorDCKey = attribute.Key("cassandra.coordinator.dc")
+
+	// CassandraCoordinatorIDKey is the attribute Key conforming to the
+	// "cassandra.coordinator.id" semantic conventions. It represents the ID of the
+	// coordinating node for a query.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: be13faa2-8574-4d71-926d-27f16cf8a7af
+	CassandraCoordinatorIDKey = attribute.Key("cassandra.coordinator.id")
+
+	// CassandraPageSizeKey is the attribute Key conforming to the
+	// "cassandra.page.size" semantic conventions. It represents the fetch size used
+	// for paging, i.e. how many rows will be returned at once.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 5000
+	CassandraPageSizeKey = attribute.Key("cassandra.page.size")
+
+	// CassandraQueryIdempotentKey is the attribute Key conforming to the
+	// "cassandra.query.idempotent" semantic conventions. It represents whether
+	// or not the query is idempotent.
+	//
+	// Type: boolean
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	CassandraQueryIdempotentKey = attribute.Key("cassandra.query.idempotent")
+
+	// CassandraSpeculativeExecutionCountKey is the attribute Key conforming to the
+	// "cassandra.speculative_execution.count" semantic conventions. It represents
+	// the number of times a query was speculatively executed. Not set or `0` if the
+	// query was not executed speculatively.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 0, 2
+	CassandraSpeculativeExecutionCountKey = attribute.Key("cassandra.speculative_execution.count")
+)
+
+// CassandraCoordinatorDC returns an attribute KeyValue conforming to the
+// "cassandra.coordinator.dc" semantic conventions. It represents the data center
+// of the coordinating node for a query.
+func CassandraCoordinatorDC(val string) attribute.KeyValue {
+	return CassandraCoordinatorDCKey.String(val)
+}
+
+// CassandraCoordinatorID returns an attribute KeyValue conforming to the
+// "cassandra.coordinator.id" semantic conventions. It represents the ID of the
+// coordinating node for a query.
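+//
+// A hedged sketch combining several Cassandra helpers on one span (values
+// taken from the examples above; the `span` variable is an assumption):
+//
+//	span.SetAttributes(
+//		CassandraCoordinatorDC("us-west-2"),
+//		CassandraCoordinatorID("be13faa2-8574-4d71-926d-27f16cf8a7af"),
+//		CassandraPageSize(5000),
+//	)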
+func CassandraCoordinatorID(val string) attribute.KeyValue {
+	return CassandraCoordinatorIDKey.String(val)
+}
+
+// CassandraPageSize returns an attribute KeyValue conforming to the
+// "cassandra.page.size" semantic conventions. It represents the fetch size used
+// for paging, i.e. how many rows will be returned at once.
+func CassandraPageSize(val int) attribute.KeyValue {
+	return CassandraPageSizeKey.Int(val)
+}
+
+// CassandraQueryIdempotent returns an attribute KeyValue conforming to the
+// "cassandra.query.idempotent" semantic conventions. It represents whether
+// or not the query is idempotent.
+func CassandraQueryIdempotent(val bool) attribute.KeyValue {
+	return CassandraQueryIdempotentKey.Bool(val)
+}
+
+// CassandraSpeculativeExecutionCount returns an attribute KeyValue conforming to
+// the "cassandra.speculative_execution.count" semantic conventions. It
+// represents the number of times a query was speculatively executed. Not set or
+// `0` if the query was not executed speculatively.
+func CassandraSpeculativeExecutionCount(val int) attribute.KeyValue {
+	return CassandraSpeculativeExecutionCountKey.Int(val)
+}
+
+// Enum values for cassandra.consistency.level
+var (
+	// all
+	// Stability: development
+	CassandraConsistencyLevelAll = CassandraConsistencyLevelKey.String("all")
+	// each_quorum
+	// Stability: development
+	CassandraConsistencyLevelEachQuorum = CassandraConsistencyLevelKey.String("each_quorum")
+	// quorum
+	// Stability: development
+	CassandraConsistencyLevelQuorum = CassandraConsistencyLevelKey.String("quorum")
+	// local_quorum
+	// Stability: development
+	CassandraConsistencyLevelLocalQuorum = CassandraConsistencyLevelKey.String("local_quorum")
+	// one
+	// Stability: development
+	CassandraConsistencyLevelOne = CassandraConsistencyLevelKey.String("one")
+	// two
+	// Stability: development
+	CassandraConsistencyLevelTwo = CassandraConsistencyLevelKey.String("two")
+	// three
+	// Stability: development
+	CassandraConsistencyLevelThree = CassandraConsistencyLevelKey.String("three")
+	// local_one
+	// Stability: development
+	CassandraConsistencyLevelLocalOne = CassandraConsistencyLevelKey.String("local_one")
+	// any
+	// Stability: development
+	CassandraConsistencyLevelAny = CassandraConsistencyLevelKey.String("any")
+	// serial
+	// Stability: development
+	CassandraConsistencyLevelSerial = CassandraConsistencyLevelKey.String("serial")
+	// local_serial
+	// Stability: development
+	CassandraConsistencyLevelLocalSerial = CassandraConsistencyLevelKey.String("local_serial")
+)
+
+// Namespace: cicd
+const (
+	// CICDPipelineActionNameKey is the attribute Key conforming to the
+	// "cicd.pipeline.action.name" semantic conventions. It represents the kind of
+	// action a pipeline run is performing.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "BUILD", "RUN", "SYNC"
+	CICDPipelineActionNameKey = attribute.Key("cicd.pipeline.action.name")
+
+	// CICDPipelineNameKey is the attribute Key conforming to the
+	// "cicd.pipeline.name" semantic conventions. It represents the human readable
+	// name of the pipeline within a CI/CD system.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Build and Test", "Lint", "Deploy Go Project",
+	// "deploy_to_environment"
+	CICDPipelineNameKey = attribute.Key("cicd.pipeline.name")
+
+	// CICDPipelineResultKey is the attribute Key conforming to the
+	// "cicd.pipeline.result" semantic conventions. It represents the result of a
+	// pipeline run.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "success", "failure", "timeout", "skipped"
+	CICDPipelineResultKey = attribute.Key("cicd.pipeline.result")
+
+	// CICDPipelineRunIDKey is the attribute Key conforming to the
+	// "cicd.pipeline.run.id" semantic conventions. It represents the unique
+	// identifier of a pipeline run within a CI/CD system.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "120912"
+	CICDPipelineRunIDKey = attribute.Key("cicd.pipeline.run.id")
+
+	// CICDPipelineRunStateKey is the attribute Key conforming to the
+	// "cicd.pipeline.run.state" semantic conventions. It represents the state of
+	// the pipeline run; the run goes through these states during its lifecycle.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "pending", "executing", "finalizing"
+	CICDPipelineRunStateKey = attribute.Key("cicd.pipeline.run.state")
+
+	// CICDPipelineRunURLFullKey is the attribute Key conforming to the
+	// "cicd.pipeline.run.url.full" semantic conventions. It represents the [URL] of
+	// the pipeline run, providing the complete address in order to locate and
+	// identify the pipeline run.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	// "https://github.com/open-telemetry/semantic-conventions/actions/runs/9753949763?pr=1075"
+	//
+	// [URL]: https://wikipedia.org/wiki/URL
+	CICDPipelineRunURLFullKey = attribute.Key("cicd.pipeline.run.url.full")
+
+	// CICDPipelineTaskNameKey is the attribute Key conforming to the
+	// "cicd.pipeline.task.name" semantic conventions. It represents the human
+	// readable name of a task within a pipeline. Task here most closely aligns with
+	// a [computing process] in a pipeline. Other terms for tasks include commands,
+	// steps, and procedures.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Run GoLang Linter", "Go Build", "go-test", "deploy_binary"
+	//
+	// [computing process]: https://wikipedia.org/wiki/Pipeline_(computing)
+	CICDPipelineTaskNameKey = attribute.Key("cicd.pipeline.task.name")
+
+	// CICDPipelineTaskRunIDKey is the attribute Key conforming to the
+	// "cicd.pipeline.task.run.id" semantic conventions. It represents the unique
+	// identifier of a task run within a pipeline.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "12097"
+	CICDPipelineTaskRunIDKey = attribute.Key("cicd.pipeline.task.run.id")
+
+	// CICDPipelineTaskRunResultKey is the attribute Key conforming to the
+	// "cicd.pipeline.task.run.result" semantic conventions. It represents the
+	// result of a task run.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "success", "failure", "timeout", "skipped"
+	CICDPipelineTaskRunResultKey = attribute.Key("cicd.pipeline.task.run.result")
+
+	// CICDPipelineTaskRunURLFullKey is the attribute Key conforming to the
+	// "cicd.pipeline.task.run.url.full" semantic conventions. It represents the
+	// [URL] of the pipeline task run, providing the complete address in order to
+	// locate and identify the pipeline task run.
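+	//
+	// A hedged sketch using this key directly (the `span` variable is an
+	// assumption; the URL is the example given below):
+	//
+	//	span.SetAttributes(CICDPipelineTaskRunURLFullKey.String(
+	//		"https://github.com/open-telemetry/semantic-conventions/actions/runs/9753949763/job/26920038674?pr=1075"))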
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "https://github.com/open-telemetry/semantic-conventions/actions/runs/9753949763/job/26920038674?pr=1075" + // + // [URL]: https://wikipedia.org/wiki/URL + CICDPipelineTaskRunURLFullKey = attribute.Key("cicd.pipeline.task.run.url.full") + + // CICDPipelineTaskTypeKey is the attribute Key conforming to the + // "cicd.pipeline.task.type" semantic conventions. It represents the type of the + // task within a pipeline. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "build", "test", "deploy" + CICDPipelineTaskTypeKey = attribute.Key("cicd.pipeline.task.type") + + // CICDSystemComponentKey is the attribute Key conforming to the + // "cicd.system.component" semantic conventions. It represents the name of a + // component of the CICD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "controller", "scheduler", "agent" + CICDSystemComponentKey = attribute.Key("cicd.system.component") + + // CICDWorkerIDKey is the attribute Key conforming to the "cicd.worker.id" + // semantic conventions. It represents the unique identifier of a worker within + // a CICD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "abc123", "10.0.1.2", "controller" + CICDWorkerIDKey = attribute.Key("cicd.worker.id") + + // CICDWorkerNameKey is the attribute Key conforming to the "cicd.worker.name" + // semantic conventions. It represents the name of a worker within a CICD + // system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "agent-abc", "controller", "Ubuntu LTS" + CICDWorkerNameKey = attribute.Key("cicd.worker.name") + + // CICDWorkerStateKey is the attribute Key conforming to the "cicd.worker.state" + // semantic conventions. It represents the state of a CICD worker / agent. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "idle", "busy", "down" + CICDWorkerStateKey = attribute.Key("cicd.worker.state") + + // CICDWorkerURLFullKey is the attribute Key conforming to the + // "cicd.worker.url.full" semantic conventions. It represents the [URL] of the + // worker, providing the complete address in order to locate and identify the + // worker. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://cicd.example.org/worker/abc123" + // + // [URL]: https://wikipedia.org/wiki/URL + CICDWorkerURLFullKey = attribute.Key("cicd.worker.url.full") +) + +// CICDPipelineName returns an attribute KeyValue conforming to the +// "cicd.pipeline.name" semantic conventions. It represents the human readable +// name of the pipeline within a CI/CD system. +func CICDPipelineName(val string) attribute.KeyValue { + return CICDPipelineNameKey.String(val) +} + +// CICDPipelineRunID returns an attribute KeyValue conforming to the +// "cicd.pipeline.run.id" semantic conventions. It represents the unique +// identifier of a pipeline run within a CI/CD system. +func CICDPipelineRunID(val string) attribute.KeyValue { + return CICDPipelineRunIDKey.String(val) +} + +// CICDPipelineRunURLFull returns an attribute KeyValue conforming to the +// "cicd.pipeline.run.url.full" semantic conventions. 
It represents the [URL] of
+// the pipeline run, providing the complete address in order to locate and
+// identify the pipeline run.
+//
+// [URL]: https://wikipedia.org/wiki/URL
+func CICDPipelineRunURLFull(val string) attribute.KeyValue {
+	return CICDPipelineRunURLFullKey.String(val)
+}
+
+// CICDPipelineTaskName returns an attribute KeyValue conforming to the
+// "cicd.pipeline.task.name" semantic conventions. It represents the human
+// readable name of a task within a pipeline. Task here most closely aligns with
+// a [computing process] in a pipeline. Other terms for tasks include commands,
+// steps, and procedures.
+//
+// [computing process]: https://wikipedia.org/wiki/Pipeline_(computing)
+func CICDPipelineTaskName(val string) attribute.KeyValue {
+	return CICDPipelineTaskNameKey.String(val)
+}
+
+// CICDPipelineTaskRunID returns an attribute KeyValue conforming to the
+// "cicd.pipeline.task.run.id" semantic conventions. It represents the unique
+// identifier of a task run within a pipeline.
+func CICDPipelineTaskRunID(val string) attribute.KeyValue {
+	return CICDPipelineTaskRunIDKey.String(val)
+}
+
+// CICDPipelineTaskRunURLFull returns an attribute KeyValue conforming to the
+// "cicd.pipeline.task.run.url.full" semantic conventions. It represents the
+// [URL] of the pipeline task run, providing the complete address in order to
+// locate and identify the pipeline task run.
+//
+// [URL]: https://wikipedia.org/wiki/URL
+func CICDPipelineTaskRunURLFull(val string) attribute.KeyValue {
+	return CICDPipelineTaskRunURLFullKey.String(val)
+}
+
+// CICDSystemComponent returns an attribute KeyValue conforming to the
+// "cicd.system.component" semantic conventions. It represents the name of a
+// component of the CICD system.
+func CICDSystemComponent(val string) attribute.KeyValue {
+	return CICDSystemComponentKey.String(val)
+}
+
+// CICDWorkerID returns an attribute KeyValue conforming to the "cicd.worker.id"
+// semantic conventions. It represents the unique identifier of a worker within a
+// CICD system.
+func CICDWorkerID(val string) attribute.KeyValue {
+	return CICDWorkerIDKey.String(val)
+}
+
+// CICDWorkerName returns an attribute KeyValue conforming to the
+// "cicd.worker.name" semantic conventions. It represents the name of a worker
+// within a CICD system.
+func CICDWorkerName(val string) attribute.KeyValue {
+	return CICDWorkerNameKey.String(val)
+}
+
+// CICDWorkerURLFull returns an attribute KeyValue conforming to the
+// "cicd.worker.url.full" semantic conventions. It represents the [URL] of the
+// worker, providing the complete address in order to locate and identify the
+// worker.
+//
+// [URL]: https://wikipedia.org/wiki/URL
+func CICDWorkerURLFull(val string) attribute.KeyValue {
+	return CICDWorkerURLFullKey.String(val)
+}
+
+// Enum values for cicd.pipeline.action.name
+var (
+	// The pipeline run is executing a build.
+	// Stability: development
+	CICDPipelineActionNameBuild = CICDPipelineActionNameKey.String("BUILD")
+	// The pipeline run is executing.
+	// Stability: development
+	CICDPipelineActionNameRun = CICDPipelineActionNameKey.String("RUN")
+	// The pipeline run is executing a sync.
+	// Stability: development
+	CICDPipelineActionNameSync = CICDPipelineActionNameKey.String("SYNC")
+)
+
+// Enum values for cicd.pipeline.result
+var (
+	// The pipeline run finished successfully.
+	// Stability: development
+	CICDPipelineResultSuccess = CICDPipelineResultKey.String("success")
+	// The pipeline run did not finish successfully, e.g. due to a compile error
+	// or a failing test. Such failures are usually detected by non-zero exit
+	// codes of the tools executed in the pipeline run.
+	// Stability: development
+	CICDPipelineResultFailure = CICDPipelineResultKey.String("failure")
+	// The pipeline run failed due to an error in the CICD system, e.g. due to
+	// the worker being killed.
+	// Stability: development
+	CICDPipelineResultError = CICDPipelineResultKey.String("error")
+	// A timeout caused the pipeline run to be interrupted.
+	// Stability: development
+	CICDPipelineResultTimeout = CICDPipelineResultKey.String("timeout")
+	// The pipeline run was cancelled, e.g. by a user manually cancelling the
+	// pipeline run.
+	// Stability: development
+	CICDPipelineResultCancellation = CICDPipelineResultKey.String("cancellation")
+	// The pipeline run was skipped, e.g. due to a precondition not being met.
+	// Stability: development
+	CICDPipelineResultSkip = CICDPipelineResultKey.String("skip")
+)
+
+// Enum values for cicd.pipeline.run.state
+var (
+	// The run pending state spans from the event triggering the pipeline run until
+	// the execution of the run starts (e.g. time spent in a queue, provisioning
+	// agents, creating run resources).
+	//
+	// Stability: development
+	CICDPipelineRunStatePending = CICDPipelineRunStateKey.String("pending")
+	// The executing state spans the execution of any run tasks (e.g. build, test).
+	// Stability: development
+	CICDPipelineRunStateExecuting = CICDPipelineRunStateKey.String("executing")
+	// The finalizing state spans from when the run has finished executing (e.g.
+	// cleanup of run resources).
+	// Stability: development
+	CICDPipelineRunStateFinalizing = CICDPipelineRunStateKey.String("finalizing")
+)
+
+// Enum values for cicd.pipeline.task.run.result
+var (
+	// The task run finished successfully.
+	// Stability: development
+	CICDPipelineTaskRunResultSuccess = CICDPipelineTaskRunResultKey.String("success")
+	// The task run did not finish successfully, e.g. due to a compile error or a
+	// failing test. Such failures are usually detected by non-zero exit codes of
+	// the tools executed in the task run.
+	// Stability: development
+	CICDPipelineTaskRunResultFailure = CICDPipelineTaskRunResultKey.String("failure")
+	// The task run failed due to an error in the CICD system, e.g. due to the
+	// worker being killed.
+	// Stability: development
+	CICDPipelineTaskRunResultError = CICDPipelineTaskRunResultKey.String("error")
+	// A timeout caused the task run to be interrupted.
+	// Stability: development
+	CICDPipelineTaskRunResultTimeout = CICDPipelineTaskRunResultKey.String("timeout")
+	// The task run was cancelled, e.g. by a user manually cancelling the task run.
+	// Stability: development
+	CICDPipelineTaskRunResultCancellation = CICDPipelineTaskRunResultKey.String("cancellation")
+	// The task run was skipped, e.g. due to a precondition not being met.
+	// Stability: development
+	CICDPipelineTaskRunResultSkip = CICDPipelineTaskRunResultKey.String("skip")
+)
+
+// Enum values for cicd.pipeline.task.type
+var (
+	// build
+	// Stability: development
+	CICDPipelineTaskTypeBuild = CICDPipelineTaskTypeKey.String("build")
+	// test
+	// Stability: development
+	CICDPipelineTaskTypeTest = CICDPipelineTaskTypeKey.String("test")
+	// deploy
+	// Stability: development
+	CICDPipelineTaskTypeDeploy = CICDPipelineTaskTypeKey.String("deploy")
+)
+
+// Enum values for cicd.worker.state
+var (
+	// The worker is not performing work for the CICD system. It is available to the
+	// CICD system to perform work on (online / idle).
+	// Stability: development
+	CICDWorkerStateAvailable = CICDWorkerStateKey.String("available")
+	// The worker is performing work for the CICD system.
+	// Stability: development
+	CICDWorkerStateBusy = CICDWorkerStateKey.String("busy")
+	// The worker is not available to the CICD system (disconnected / down).
+	// Stability: development
+	CICDWorkerStateOffline = CICDWorkerStateKey.String("offline")
+)
+
+// Namespace: client
+const (
+	// ClientAddressKey is the attribute Key conforming to the "client.address"
+	// semantic conventions. It represents the client address - domain name if
+	// available without reverse DNS lookup; otherwise, IP address or Unix domain
+	// socket name.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: "client.example.com", "10.1.2.80", "/tmp/my.sock"
+	// Note: When observed from the server side, and when communicating through an
+	// intermediary, `client.address` SHOULD represent the client address behind any
+	// intermediaries, for example proxies, if it's available.
+	ClientAddressKey = attribute.Key("client.address")
+
+	// ClientPortKey is the attribute Key conforming to the "client.port" semantic
+	// conventions. It represents the client port number.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: 65123
+	// Note: When observed from the server side, and when communicating through an
+	// intermediary, `client.port` SHOULD represent the client port behind any
+	// intermediaries, for example proxies, if it's available.
+	ClientPortKey = attribute.Key("client.port")
+)
+
+// ClientAddress returns an attribute KeyValue conforming to the "client.address"
+// semantic conventions. It represents the client address - domain name if
+// available without reverse DNS lookup; otherwise, IP address or Unix domain
+// socket name.
+func ClientAddress(val string) attribute.KeyValue {
+	return ClientAddressKey.String(val)
+}
+
+// ClientPort returns an attribute KeyValue conforming to the "client.port"
+// semantic conventions. It represents the client port number.
+func ClientPort(val int) attribute.KeyValue {
+	return ClientPortKey.Int(val)
+}
+
+// Namespace: cloud
+const (
+	// CloudAccountIDKey is the attribute Key conforming to the "cloud.account.id"
+	// semantic conventions. It represents the cloud account ID the resource is
+	// assigned to.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "111111111111", "opentelemetry"
+	CloudAccountIDKey = attribute.Key("cloud.account.id")
+
+	// CloudAvailabilityZoneKey is the attribute Key conforming to the
+	// "cloud.availability_zone" semantic conventions. It represents the
+	// availability zone where the resource is running; cloud regions often have
+	// multiple, isolated locations known as zones to increase availability.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "us-east-1c"
+	// Note: Availability zones are called "zones" on Alibaba Cloud and Google
+	// Cloud.
+	CloudAvailabilityZoneKey = attribute.Key("cloud.availability_zone")
+
+	// CloudPlatformKey is the attribute Key conforming to the "cloud.platform"
+	// semantic conventions. It represents the cloud platform in use.
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: The prefix of the service SHOULD match the one specified in + // `cloud.provider`. + CloudPlatformKey = attribute.Key("cloud.platform") + + // CloudProviderKey is the attribute Key conforming to the "cloud.provider" + // semantic conventions. It represents the name of the cloud provider. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + CloudProviderKey = attribute.Key("cloud.provider") + + // CloudRegionKey is the attribute Key conforming to the "cloud.region" semantic + // conventions. It represents the geographical region within a cloud provider. + // When associated with a resource, this attribute specifies the region where + // the resource operates. When calling services or APIs deployed on a cloud, + // this attribute identifies the region where the called destination is + // deployed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "us-central1", "us-east-1" + // Note: Refer to your provider's docs to see the available regions, for example + // [Alibaba Cloud regions], [AWS regions], [Azure regions], + // [Google Cloud regions], or [Tencent Cloud regions]. + // + // [Alibaba Cloud regions]: https://www.alibabacloud.com/help/doc-detail/40654.htm + // [AWS regions]: https://aws.amazon.com/about-aws/global-infrastructure/regions_az/ + // [Azure regions]: https://azure.microsoft.com/global-infrastructure/geographies/ + // [Google Cloud regions]: https://cloud.google.com/about/locations + // [Tencent Cloud regions]: https://www.tencentcloud.com/document/product/213/6091 + CloudRegionKey = attribute.Key("cloud.region") + + // CloudResourceIDKey is the attribute Key conforming to the "cloud.resource_id" + // semantic conventions. It represents the cloud provider-specific native + // identifier of the monitored cloud resource (e.g. an [ARN] on AWS, a + // [fully qualified resource ID] on Azure, a [full resource name] on GCP). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:lambda:REGION:ACCOUNT_ID:function:my-function", + // "//run.googleapis.com/projects/PROJECT_ID/locations/LOCATION_ID/services/SERVICE_ID", + // "/subscriptions//resourceGroups/ + // /providers/Microsoft.Web/sites//functions/" + // Note: On some cloud providers, it may not be possible to determine the full + // ID at startup, + // so it may be necessary to set `cloud.resource_id` as a span attribute + // instead. + // + // The exact value to use for `cloud.resource_id` depends on the cloud provider. + // The following well-known definitions MUST be used if you set this attribute + // and they apply: + // + // - **AWS Lambda:** The function [ARN]. + // Take care not to use the "invoked ARN" directly but replace any + // [alias suffix] + // with the resolved function version, as the same runtime instance may be + // invocable with + // multiple different aliases. + // - **GCP:** The [URI of the resource] + // - **Azure:** The [Fully Qualified Resource ID] of the invoked function, + // *not* the function app, having the form + // + // `/subscriptions//resourceGroups//providers/Microsoft.Web/sites//functions/` + // . + // This means that a span attribute MUST be used, as an Azure function app + // can host multiple functions that would usually share + // a TracerProvider. 
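+	//
+	// A hedged sketch of setting this as a span attribute when the full ID is
+	// only known at invocation time (the `span` and `functionARN` variables are
+	// assumptions):
+	//
+	//	span.SetAttributes(CloudResourceID(functionARN))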
+	//
+	//
+	// [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
+	// [fully qualified resource ID]: https://learn.microsoft.com/rest/api/resources/resources/get-by-id
+	// [full resource name]: https://google.aip.dev/122#full-resource-names
+	// [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
+	// [alias suffix]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-aliases.html
+	// [URI of the resource]: https://cloud.google.com/iam/docs/full-resource-names
+	// [Fully Qualified Resource ID]: https://docs.microsoft.com/rest/api/resources/resources/get-by-id
+	CloudResourceIDKey = attribute.Key("cloud.resource_id")
+)
+
+// CloudAccountID returns an attribute KeyValue conforming to the
+// "cloud.account.id" semantic conventions. It represents the cloud account ID
+// the resource is assigned to.
+func CloudAccountID(val string) attribute.KeyValue {
+	return CloudAccountIDKey.String(val)
+}
+
+// CloudAvailabilityZone returns an attribute KeyValue conforming to the
+// "cloud.availability_zone" semantic conventions. It represents the
+// availability zone where the resource is running; cloud regions often have
+// multiple, isolated locations known as zones to increase availability.
+func CloudAvailabilityZone(val string) attribute.KeyValue {
+	return CloudAvailabilityZoneKey.String(val)
+}
+
+// CloudRegion returns an attribute KeyValue conforming to the "cloud.region"
+// semantic conventions. It represents the geographical region within a cloud
+// provider. When associated with a resource, this attribute specifies the region
+// where the resource operates. When calling services or APIs deployed on a
+// cloud, this attribute identifies the region where the called destination is
+// deployed.
+func CloudRegion(val string) attribute.KeyValue {
+	return CloudRegionKey.String(val)
+}
+
+// CloudResourceID returns an attribute KeyValue conforming to the
+// "cloud.resource_id" semantic conventions. It represents the cloud
+// provider-specific native identifier of the monitored cloud resource (e.g. an
+// [ARN] on AWS, a [fully qualified resource ID] on Azure, a [full resource name]
+// on GCP).
+// +// [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html +// [fully qualified resource ID]: https://learn.microsoft.com/rest/api/resources/resources/get-by-id +// [full resource name]: https://google.aip.dev/122#full-resource-names +func CloudResourceID(val string) attribute.KeyValue { + return CloudResourceIDKey.String(val) +} + +// Enum values for cloud.platform +var ( + // Alibaba Cloud Elastic Compute Service + // Stability: development + CloudPlatformAlibabaCloudECS = CloudPlatformKey.String("alibaba_cloud_ecs") + // Alibaba Cloud Function Compute + // Stability: development + CloudPlatformAlibabaCloudFC = CloudPlatformKey.String("alibaba_cloud_fc") + // Red Hat OpenShift on Alibaba Cloud + // Stability: development + CloudPlatformAlibabaCloudOpenShift = CloudPlatformKey.String("alibaba_cloud_openshift") + // AWS Elastic Compute Cloud + // Stability: development + CloudPlatformAWSEC2 = CloudPlatformKey.String("aws_ec2") + // AWS Elastic Container Service + // Stability: development + CloudPlatformAWSECS = CloudPlatformKey.String("aws_ecs") + // AWS Elastic Kubernetes Service + // Stability: development + CloudPlatformAWSEKS = CloudPlatformKey.String("aws_eks") + // AWS Lambda + // Stability: development + CloudPlatformAWSLambda = CloudPlatformKey.String("aws_lambda") + // AWS Elastic Beanstalk + // Stability: development + CloudPlatformAWSElasticBeanstalk = CloudPlatformKey.String("aws_elastic_beanstalk") + // AWS App Runner + // Stability: development + CloudPlatformAWSAppRunner = CloudPlatformKey.String("aws_app_runner") + // Red Hat OpenShift on AWS (ROSA) + // Stability: development + CloudPlatformAWSOpenShift = CloudPlatformKey.String("aws_openshift") + // Azure Virtual Machines + // Stability: development + CloudPlatformAzureVM = CloudPlatformKey.String("azure_vm") + // Azure Container Apps + // Stability: development + CloudPlatformAzureContainerApps = CloudPlatformKey.String("azure_container_apps") + // Azure Container Instances + // Stability: development + CloudPlatformAzureContainerInstances = CloudPlatformKey.String("azure_container_instances") + // Azure Kubernetes Service + // Stability: development + CloudPlatformAzureAKS = CloudPlatformKey.String("azure_aks") + // Azure Functions + // Stability: development + CloudPlatformAzureFunctions = CloudPlatformKey.String("azure_functions") + // Azure App Service + // Stability: development + CloudPlatformAzureAppService = CloudPlatformKey.String("azure_app_service") + // Azure Red Hat OpenShift + // Stability: development + CloudPlatformAzureOpenShift = CloudPlatformKey.String("azure_openshift") + // Google Bare Metal Solution (BMS) + // Stability: development + CloudPlatformGCPBareMetalSolution = CloudPlatformKey.String("gcp_bare_metal_solution") + // Google Cloud Compute Engine (GCE) + // Stability: development + CloudPlatformGCPComputeEngine = CloudPlatformKey.String("gcp_compute_engine") + // Google Cloud Run + // Stability: development + CloudPlatformGCPCloudRun = CloudPlatformKey.String("gcp_cloud_run") + // Google Cloud Kubernetes Engine (GKE) + // Stability: development + CloudPlatformGCPKubernetesEngine = CloudPlatformKey.String("gcp_kubernetes_engine") + // Google Cloud Functions (GCF) + // Stability: development + CloudPlatformGCPCloudFunctions = CloudPlatformKey.String("gcp_cloud_functions") + // Google Cloud App Engine (GAE) + // Stability: development + CloudPlatformGCPAppEngine = CloudPlatformKey.String("gcp_app_engine") + // Red Hat OpenShift on Google Cloud + // Stability: 
development
+	CloudPlatformGCPOpenShift = CloudPlatformKey.String("gcp_openshift")
+	// Red Hat OpenShift on IBM Cloud
+	// Stability: development
+	CloudPlatformIBMCloudOpenShift = CloudPlatformKey.String("ibm_cloud_openshift")
+	// Compute on Oracle Cloud Infrastructure (OCI)
+	// Stability: development
+	CloudPlatformOracleCloudCompute = CloudPlatformKey.String("oracle_cloud_compute")
+	// Kubernetes Engine (OKE) on Oracle Cloud Infrastructure (OCI)
+	// Stability: development
+	CloudPlatformOracleCloudOKE = CloudPlatformKey.String("oracle_cloud_oke")
+	// Tencent Cloud Cloud Virtual Machine (CVM)
+	// Stability: development
+	CloudPlatformTencentCloudCVM = CloudPlatformKey.String("tencent_cloud_cvm")
+	// Tencent Cloud Elastic Kubernetes Service (EKS)
+	// Stability: development
+	CloudPlatformTencentCloudEKS = CloudPlatformKey.String("tencent_cloud_eks")
+	// Tencent Cloud Serverless Cloud Function (SCF)
+	// Stability: development
+	CloudPlatformTencentCloudSCF = CloudPlatformKey.String("tencent_cloud_scf")
+)
+
+// Enum values for cloud.provider
+var (
+	// Alibaba Cloud
+	// Stability: development
+	CloudProviderAlibabaCloud = CloudProviderKey.String("alibaba_cloud")
+	// Amazon Web Services
+	// Stability: development
+	CloudProviderAWS = CloudProviderKey.String("aws")
+	// Microsoft Azure
+	// Stability: development
+	CloudProviderAzure = CloudProviderKey.String("azure")
+	// Google Cloud Platform
+	// Stability: development
+	CloudProviderGCP = CloudProviderKey.String("gcp")
+	// Heroku Platform as a Service
+	// Stability: development
+	CloudProviderHeroku = CloudProviderKey.String("heroku")
+	// IBM Cloud
+	// Stability: development
+	CloudProviderIBMCloud = CloudProviderKey.String("ibm_cloud")
+	// Oracle Cloud Infrastructure (OCI)
+	// Stability: development
+	CloudProviderOracleCloud = CloudProviderKey.String("oracle_cloud")
+	// Tencent Cloud
+	// Stability: development
+	CloudProviderTencentCloud = CloudProviderKey.String("tencent_cloud")
+)
+
+// Namespace: cloudevents
+const (
+	// CloudEventsEventIDKey is the attribute Key conforming to the
+	// "cloudevents.event_id" semantic conventions. It represents the [event_id],
+	// which uniquely identifies the event.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "123e4567-e89b-12d3-a456-426614174000", "0001"
+	//
+	// [event_id]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#id
+	CloudEventsEventIDKey = attribute.Key("cloudevents.event_id")
+
+	// CloudEventsEventSourceKey is the attribute Key conforming to the
+	// "cloudevents.event_source" semantic conventions. It represents the [source],
+	// which identifies the context in which an event happened.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "https://github.com/cloudevents", "/cloudevents/spec/pull/123",
+	// "my-service"
+	//
+	// [source]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#source-1
+	CloudEventsEventSourceKey = attribute.Key("cloudevents.event_source")
+
+	// CloudEventsEventSpecVersionKey is the attribute Key conforming to the
+	// "cloudevents.event_spec_version" semantic conventions. It represents the
+	// [version of the CloudEvents specification] which the event uses.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 1.0
+	//
+	// [version of the CloudEvents specification]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#specversion
+	CloudEventsEventSpecVersionKey = attribute.Key("cloudevents.event_spec_version")
+
+	// CloudEventsEventSubjectKey is the attribute Key conforming to the
+	// "cloudevents.event_subject" semantic conventions. It represents the [subject]
+	// of the event in the context of the event producer (identified by source).
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: mynewfile.jpg
+	//
+	// [subject]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#subject
+	CloudEventsEventSubjectKey = attribute.Key("cloudevents.event_subject")
+
+	// CloudEventsEventTypeKey is the attribute Key conforming to the
+	// "cloudevents.event_type" semantic conventions. It represents the
+	// [event_type], which contains a value describing the type of event related to
+	// the originating occurrence.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "com.github.pull_request.opened", "com.example.object.deleted.v2"
+	//
+	// [event_type]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#type
+	CloudEventsEventTypeKey = attribute.Key("cloudevents.event_type")
+)
+
+// CloudEventsEventID returns an attribute KeyValue conforming to the
+// "cloudevents.event_id" semantic conventions. It represents the [event_id],
+// which uniquely identifies the event.
+//
+// [event_id]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#id
+func CloudEventsEventID(val string) attribute.KeyValue {
+	return CloudEventsEventIDKey.String(val)
+}
+
+// CloudEventsEventSource returns an attribute KeyValue conforming to the
+// "cloudevents.event_source" semantic conventions. It represents the [source],
+// which identifies the context in which an event happened.
+//
+// [source]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#source-1
+func CloudEventsEventSource(val string) attribute.KeyValue {
+	return CloudEventsEventSourceKey.String(val)
+}
+
+// CloudEventsEventSpecVersion returns an attribute KeyValue conforming to the
+// "cloudevents.event_spec_version" semantic conventions. It represents the
+// [version of the CloudEvents specification] which the event uses.
+//
+// [version of the CloudEvents specification]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#specversion
+func CloudEventsEventSpecVersion(val string) attribute.KeyValue {
+	return CloudEventsEventSpecVersionKey.String(val)
+}
+
+// CloudEventsEventSubject returns an attribute KeyValue conforming to the
+// "cloudevents.event_subject" semantic conventions. It represents the [subject]
+// of the event in the context of the event producer (identified by source).
+//
+// [subject]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#subject
+func CloudEventsEventSubject(val string) attribute.KeyValue {
+	return CloudEventsEventSubjectKey.String(val)
+}
+
+// CloudEventsEventType returns an attribute KeyValue conforming to the
+// "cloudevents.event_type" semantic conventions. It represents the
+// [event_type], which contains a value describing the type of event related to
+// the originating occurrence.
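+//
+// A hedged sketch of annotating a consumer span with CloudEvents attributes
+// (values taken from the examples above; the `span` variable is an
+// assumption):
+//
+//	span.SetAttributes(
+//		CloudEventsEventID("123e4567-e89b-12d3-a456-426614174000"),
+//		CloudEventsEventType("com.github.pull_request.opened"),
+//	)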
+// +// [event_type]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#type +func CloudEventsEventType(val string) attribute.KeyValue { + return CloudEventsEventTypeKey.String(val) +} + +// Namespace: cloudfoundry +const ( + // CloudFoundryAppIDKey is the attribute Key conforming to the + // "cloudfoundry.app.id" semantic conventions. It represents the guid of the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.application_id`. This is the same value as + // reported by `cf app --guid`. + CloudFoundryAppIDKey = attribute.Key("cloudfoundry.app.id") + + // CloudFoundryAppInstanceIDKey is the attribute Key conforming to the + // "cloudfoundry.app.instance.id" semantic conventions. It represents the index + // of the application instance. 0 when just one instance is active. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0", "1" + // Note: CloudFoundry defines the `instance_id` in the [Loggregator v2 envelope] + // . + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the application instance index for applications + // deployed on the runtime. + // + // Application instrumentation should use the value from environment + // variable `CF_INSTANCE_INDEX`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + CloudFoundryAppInstanceIDKey = attribute.Key("cloudfoundry.app.instance.id") + + // CloudFoundryAppNameKey is the attribute Key conforming to the + // "cloudfoundry.app.name" semantic conventions. It represents the name of the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-app-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.application_name`. This is the same value + // as reported by `cf apps`. + CloudFoundryAppNameKey = attribute.Key("cloudfoundry.app.name") + + // CloudFoundryOrgIDKey is the attribute Key conforming to the + // "cloudfoundry.org.id" semantic conventions. It represents the guid of the + // CloudFoundry org the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.org_id`. This is the same value as + // reported by `cf org --guid`. + CloudFoundryOrgIDKey = attribute.Key("cloudfoundry.org.id") + + // CloudFoundryOrgNameKey is the attribute Key conforming to the + // "cloudfoundry.org.name" semantic conventions. It represents the name of the + // CloudFoundry organization the app is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-org-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.org_name`. This is the same value as + // reported by `cf orgs`. + CloudFoundryOrgNameKey = attribute.Key("cloudfoundry.org.name") + + // CloudFoundryProcessIDKey is the attribute Key conforming to the + // "cloudfoundry.process.id" semantic conventions. It represents the UID + // identifying the process. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.process_id`. It is supposed to be equal to + // `VCAP_APPLICATION.app_id` for applications deployed to the runtime. + // For system components, this could be the actual PID. + CloudFoundryProcessIDKey = attribute.Key("cloudfoundry.process.id") + + // CloudFoundryProcessTypeKey is the attribute Key conforming to the + // "cloudfoundry.process.type" semantic conventions. It represents the type of + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "web" + // Note: CloudFoundry applications can consist of multiple jobs. Usually the + // main process will be of type `web`. There can be additional background + // tasks or side-cars with different process types. + CloudFoundryProcessTypeKey = attribute.Key("cloudfoundry.process.type") + + // CloudFoundrySpaceIDKey is the attribute Key conforming to the + // "cloudfoundry.space.id" semantic conventions. It represents the guid of the + // CloudFoundry space the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.space_id`. This is the same value as + // reported by `cf space --guid`. + CloudFoundrySpaceIDKey = attribute.Key("cloudfoundry.space.id") + + // CloudFoundrySpaceNameKey is the attribute Key conforming to the + // "cloudfoundry.space.name" semantic conventions. It represents the name of the + // CloudFoundry space the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-space-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.space_name`. This is the same value as + // reported by `cf spaces`. + CloudFoundrySpaceNameKey = attribute.Key("cloudfoundry.space.name") + + // CloudFoundrySystemIDKey is the attribute Key conforming to the + // "cloudfoundry.system.id" semantic conventions. It represents a guid or + // another name describing the event source. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cf/gorouter" + // Note: CloudFoundry defines the `source_id` in the [Loggregator v2 envelope]. + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the component name, e.g. "gorouter", for + // CloudFoundry components. + // + // When system components are instrumented, values from the + // [Bosh spec] + // should be used. The `system.id` should be set to + // `spec.deployment/spec.name`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + // [Bosh spec]: https://bosh.io/docs/jobs/#properties-spec + CloudFoundrySystemIDKey = attribute.Key("cloudfoundry.system.id") + + // CloudFoundrySystemInstanceIDKey is the attribute Key conforming to the + // "cloudfoundry.system.instance.id" semantic conventions. It represents a guid + // describing the concrete instance of the event source. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: CloudFoundry defines the `instance_id` in the [Loggregator v2 envelope] + // . + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the vm id for CloudFoundry components. + // + // When system components are instrumented, values from the + // [Bosh spec] + // should be used. The `system.instance.id` should be set to `spec.id`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + // [Bosh spec]: https://bosh.io/docs/jobs/#properties-spec + CloudFoundrySystemInstanceIDKey = attribute.Key("cloudfoundry.system.instance.id") +) + +// CloudFoundryAppID returns an attribute KeyValue conforming to the +// "cloudfoundry.app.id" semantic conventions. It represents the guid of the +// application. +func CloudFoundryAppID(val string) attribute.KeyValue { + return CloudFoundryAppIDKey.String(val) +} + +// CloudFoundryAppInstanceID returns an attribute KeyValue conforming to the +// "cloudfoundry.app.instance.id" semantic conventions. It represents the index +// of the application instance. 0 when just one instance is active. +func CloudFoundryAppInstanceID(val string) attribute.KeyValue { + return CloudFoundryAppInstanceIDKey.String(val) +} + +// CloudFoundryAppName returns an attribute KeyValue conforming to the +// "cloudfoundry.app.name" semantic conventions. It represents the name of the +// application. +func CloudFoundryAppName(val string) attribute.KeyValue { + return CloudFoundryAppNameKey.String(val) +} + +// CloudFoundryOrgID returns an attribute KeyValue conforming to the +// "cloudfoundry.org.id" semantic conventions. It represents the guid of the +// CloudFoundry org the application is running in. +func CloudFoundryOrgID(val string) attribute.KeyValue { + return CloudFoundryOrgIDKey.String(val) +} + +// CloudFoundryOrgName returns an attribute KeyValue conforming to the +// "cloudfoundry.org.name" semantic conventions. It represents the name of the +// CloudFoundry organization the app is running in. +func CloudFoundryOrgName(val string) attribute.KeyValue { + return CloudFoundryOrgNameKey.String(val) +} + +// CloudFoundryProcessID returns an attribute KeyValue conforming to the +// "cloudfoundry.process.id" semantic conventions. It represents the UID +// identifying the process. +func CloudFoundryProcessID(val string) attribute.KeyValue { + return CloudFoundryProcessIDKey.String(val) +} + +// CloudFoundryProcessType returns an attribute KeyValue conforming to the +// "cloudfoundry.process.type" semantic conventions. It represents the type of +// process. +func CloudFoundryProcessType(val string) attribute.KeyValue { + return CloudFoundryProcessTypeKey.String(val) +} + +// CloudFoundrySpaceID returns an attribute KeyValue conforming to the +// "cloudfoundry.space.id" semantic conventions. It represents the guid of the +// CloudFoundry space the application is running in. +func CloudFoundrySpaceID(val string) attribute.KeyValue { + return CloudFoundrySpaceIDKey.String(val) +} + +// CloudFoundrySpaceName returns an attribute KeyValue conforming to the +// "cloudfoundry.space.name" semantic conventions. It represents the name of the +// CloudFoundry space the application is running in. 
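+//
+// A hedged usage sketch (not generated): CloudFoundry attributes such as these
+// are typically collected together, for example as resource attributes:
+//
+//	attrs := []attribute.KeyValue{
+//		CloudFoundryAppName("my-app-name"),
+//		CloudFoundrySpaceName("my-space-name"),
+//	}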
+func CloudFoundrySpaceName(val string) attribute.KeyValue {
+	return CloudFoundrySpaceNameKey.String(val)
+}
+
+// CloudFoundrySystemID returns an attribute KeyValue conforming to the
+// "cloudfoundry.system.id" semantic conventions. It represents a guid or another
+// name describing the event source.
+func CloudFoundrySystemID(val string) attribute.KeyValue {
+	return CloudFoundrySystemIDKey.String(val)
+}
+
+// CloudFoundrySystemInstanceID returns an attribute KeyValue conforming to the
+// "cloudfoundry.system.instance.id" semantic conventions. It represents a guid
+// describing the concrete instance of the event source.
+func CloudFoundrySystemInstanceID(val string) attribute.KeyValue {
+	return CloudFoundrySystemInstanceIDKey.String(val)
+}
+
+// Namespace: code
+const (
+	// CodeColumnNumberKey is the attribute Key conforming to the
+	// "code.column.number" semantic conventions. It represents the column number in
+	// `code.file.path` best representing the operation. It SHOULD point within the
+	// code unit named in `code.function.name`. This attribute MUST NOT be used on
+	// the Profile signal since the data is already captured in 'message Line'. This
+	// constraint is imposed to prevent redundancy and maintain data integrity.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	CodeColumnNumberKey = attribute.Key("code.column.number")
+
+	// CodeFilePathKey is the attribute Key conforming to the "code.file.path"
+	// semantic conventions. It represents the source code file name that identifies
+	// the code unit as uniquely as possible (preferably an absolute file path).
+	// This attribute MUST NOT be used on the Profile signal since the data is
+	// already captured in 'message Function'. This constraint is imposed to prevent
+	// redundancy and maintain data integrity.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: /usr/local/MyApplication/content_root/app/index.php
+	CodeFilePathKey = attribute.Key("code.file.path")
+
+	// CodeFunctionNameKey is the attribute Key conforming to the
+	// "code.function.name" semantic conventions. It represents the method or
+	// function fully-qualified name without arguments. The value should fit the
+	// natural representation of the language runtime, which is also likely the same
+	// used within `code.stacktrace` attribute value. This attribute MUST NOT be
+	// used on the Profile signal since the data is already captured in 'message
+	// Function'. This constraint is imposed to prevent redundancy and maintain data
+	// integrity.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: "com.example.MyHttpService.serveRequest",
+	// "GuzzleHttp\Client::transfer", "fopen"
+	// Note: Values and format depend on each language runtime, thus it is
+	// impossible to provide an exhaustive list of examples.
+	// The values are usually the same (or prefixes of) the ones found in native
+	// stack trace representation stored in
+	// `code.stacktrace` without information on arguments.
+ // + // Examples: + // + // - Java method: `com.example.MyHttpService.serveRequest` + // - Java anonymous class method: `com.mycompany.Main$1.myMethod` + // - Java lambda method: + // `com.mycompany.Main$$Lambda/0x0000748ae4149c00.myMethod` + // - PHP function: `GuzzleHttp\Client::transfer` + // - Go function: `github.com/my/repo/pkg.foo.func5` + // - Elixir: `OpenTelemetry.Ctx.new` + // - Erlang: `opentelemetry_ctx:new` + // - Rust: `playground::my_module::my_cool_func` + // - C function: `fopen` + CodeFunctionNameKey = attribute.Key("code.function.name") + + // CodeLineNumberKey is the attribute Key conforming to the "code.line.number" + // semantic conventions. It represents the line number in `code.file.path` best + // representing the operation. It SHOULD point within the code unit named in + // `code.function.name`. This attribute MUST NOT be used on the Profile signal + // since the data is already captured in 'message Line'. This constraint is + // imposed to prevent redundancy and maintain data integrity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + CodeLineNumberKey = attribute.Key("code.line.number") + + // CodeStacktraceKey is the attribute Key conforming to the "code.stacktrace" + // semantic conventions. It represents a stacktrace as a string in the natural + // representation for the language runtime. The representation is identical to + // [`exception.stacktrace`]. This attribute MUST NOT be used on the Profile + // signal since the data is already captured in 'message Location'. This + // constraint is imposed to prevent redundancy and maintain data integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: at com.example.GenerateTrace.methodB(GenerateTrace.java:13)\n at + // com.example.GenerateTrace.methodA(GenerateTrace.java:9)\n at + // com.example.GenerateTrace.main(GenerateTrace.java:5) + // + // [`exception.stacktrace`]: /docs/exceptions/exceptions-spans.md#stacktrace-representation + CodeStacktraceKey = attribute.Key("code.stacktrace") +) + +// CodeColumnNumber returns an attribute KeyValue conforming to the +// "code.column.number" semantic conventions. It represents the column number in +// `code.file.path` best representing the operation. It SHOULD point within the +// code unit named in `code.function.name`. This attribute MUST NOT be used on +// the Profile signal since the data is already captured in 'message Line'. This +// constraint is imposed to prevent redundancy and maintain data integrity. +func CodeColumnNumber(val int) attribute.KeyValue { + return CodeColumnNumberKey.Int(val) +} + +// CodeFilePath returns an attribute KeyValue conforming to the "code.file.path" +// semantic conventions. It represents the source code file name that identifies +// the code unit as uniquely as possible (preferably an absolute file path). This +// attribute MUST NOT be used on the Profile signal since the data is already +// captured in 'message Function'. This constraint is imposed to prevent +// redundancy and maintain data integrity. +func CodeFilePath(val string) attribute.KeyValue { + return CodeFilePathKey.String(val) +} + +// CodeFunctionName returns an attribute KeyValue conforming to the +// "code.function.name" semantic conventions. It represents the method or +// function fully-qualified name without arguments. The value should fit the +// natural representation of the language runtime, which is also likely the same +// used within `code.stacktrace` attribute value. 
This attribute MUST NOT be used
+// on the Profile signal since the data is already captured in 'message
+// Function'. This constraint is imposed to prevent redundancy and maintain data
+// integrity.
+func CodeFunctionName(val string) attribute.KeyValue {
+	return CodeFunctionNameKey.String(val)
+}
+
+// CodeLineNumber returns an attribute KeyValue conforming to the
+// "code.line.number" semantic conventions. It represents the line number in
+// `code.file.path` best representing the operation. It SHOULD point within the
+// code unit named in `code.function.name`. This attribute MUST NOT be used on
+// the Profile signal since the data is already captured in 'message Line'. This
+// constraint is imposed to prevent redundancy and maintain data integrity.
+func CodeLineNumber(val int) attribute.KeyValue {
+	return CodeLineNumberKey.Int(val)
+}
+
+// CodeStacktrace returns an attribute KeyValue conforming to the
+// "code.stacktrace" semantic conventions. It represents a stacktrace as a string
+// in the natural representation for the language runtime. The representation is
+// identical to [`exception.stacktrace`]. This attribute MUST NOT be used on the
+// Profile signal since the data is already captured in 'message Location'. This
+// constraint is imposed to prevent redundancy and maintain data integrity.
+//
+// [`exception.stacktrace`]: /docs/exceptions/exceptions-spans.md#stacktrace-representation
+func CodeStacktrace(val string) attribute.KeyValue {
+	return CodeStacktraceKey.String(val)
+}
+
+// Namespace: container
+const (
+	// ContainerCommandKey is the attribute Key conforming to the
+	// "container.command" semantic conventions. It represents the command used to
+	// run the container (i.e. the command name).
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "otelcontribcol"
+	// Note: If using embedded credentials or sensitive data, it is recommended to
+	// remove them to prevent potential leakage.
+	ContainerCommandKey = attribute.Key("container.command")
+
+	// ContainerCommandArgsKey is the attribute Key conforming to the
+	// "container.command_args" semantic conventions. It represents all the
+	// command arguments (including the command/executable itself) run by the
+	// container.
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "otelcontribcol", "--config", "config.yaml"
+	ContainerCommandArgsKey = attribute.Key("container.command_args")
+
+	// ContainerCommandLineKey is the attribute Key conforming to the
+	// "container.command_line" semantic conventions. It represents the full command
+	// run by the container as a single string.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "otelcontribcol --config config.yaml"
+	ContainerCommandLineKey = attribute.Key("container.command_line")
+
+	// ContainerCSIPluginNameKey is the attribute Key conforming to the
+	// "container.csi.plugin.name" semantic conventions. It represents the name of
+	// the CSI ([Container Storage Interface]) plugin used by the volume.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "pd.csi.storage.gke.io"
+	// Note: This can sometimes be referred to as a "driver" in CSI implementations.
+	// This should represent the `name` field of the GetPluginInfo RPC.
+ // + // [Container Storage Interface]: https://github.com/container-storage-interface/spec + ContainerCSIPluginNameKey = attribute.Key("container.csi.plugin.name") + + // ContainerCSIVolumeIDKey is the attribute Key conforming to the + // "container.csi.volume.id" semantic conventions. It represents the unique + // volume ID returned by the CSI ([Container Storage Interface]) plugin. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "projects/my-gcp-project/zones/my-gcp-zone/disks/my-gcp-disk" + // Note: This can sometimes be referred to as a "volume handle" in CSI + // implementations. This should represent the `Volume.volume_id` field in CSI + // spec. + // + // [Container Storage Interface]: https://github.com/container-storage-interface/spec + ContainerCSIVolumeIDKey = attribute.Key("container.csi.volume.id") + + // ContainerIDKey is the attribute Key conforming to the "container.id" semantic + // conventions. It represents the container ID. Usually a UUID, as for example + // used to [identify Docker containers]. The UUID might be abbreviated. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a3bf90e006b2" + // + // [identify Docker containers]: https://docs.docker.com/engine/containers/run/#container-identification + ContainerIDKey = attribute.Key("container.id") + + // ContainerImageIDKey is the attribute Key conforming to the + // "container.image.id" semantic conventions. It represents the runtime specific + // image identifier. Usually a hash algorithm followed by a UUID. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "sha256:19c92d0a00d1b66d897bceaa7319bee0dd38a10a851c60bcec9474aa3f01e50f" + // Note: Docker defines a sha256 of the image id; `container.image.id` + // corresponds to the `Image` field from the Docker container inspect [API] + // endpoint. + // K8s defines a link to the container registry repository with digest + // `"imageID": "registry.azurecr.io /namespace/service/dockerfile@sha256:bdeabd40c3a8a492eaf9e8e44d0ebbb84bac7ee25ac0cf8a7159d25f62555625"` + // . + // The ID is assigned by the container runtime and can vary in different + // environments. Consider using `oci.manifest.digest` if it is important to + // identify the same image in different environments/runtimes. + // + // [API]: https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerInspect + ContainerImageIDKey = attribute.Key("container.image.id") + + // ContainerImageNameKey is the attribute Key conforming to the + // "container.image.name" semantic conventions. It represents the name of the + // image the container was built on. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "gcr.io/opentelemetry/operator" + ContainerImageNameKey = attribute.Key("container.image.name") + + // ContainerImageRepoDigestsKey is the attribute Key conforming to the + // "container.image.repo_digests" semantic conventions. It represents the repo + // digests of the container image as provided by the container runtime. 
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	// "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb",
+	// "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578"
+	// Note: [Docker] and [CRI] report those under the `RepoDigests` field.
+	//
+	// [Docker]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect
+	// [CRI]: https://github.com/kubernetes/cri-api/blob/c75ef5b473bbe2d0a4fc92f82235efd665ea8e9f/pkg/apis/runtime/v1/api.proto#L1237-L1238
+	ContainerImageRepoDigestsKey = attribute.Key("container.image.repo_digests")
+
+	// ContainerImageTagsKey is the attribute Key conforming to the
+	// "container.image.tags" semantic conventions. It represents the container
+	// image tags. An example can be found in [Docker Image Inspect]. Should be only
+	// the `<tag>` section of the full name, for example from
+	// `registry.example.com/my-org/my-image:<tag>`.
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "v1.27.1", "3.5.7-0"
+	//
+	// [Docker Image Inspect]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect
+	ContainerImageTagsKey = attribute.Key("container.image.tags")
+
+	// ContainerNameKey is the attribute Key conforming to the "container.name"
+	// semantic conventions. It represents the container name used by container
+	// runtime.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "opentelemetry-autoconf"
+	ContainerNameKey = attribute.Key("container.name")
+
+	// ContainerRuntimeKey is the attribute Key conforming to the
+	// "container.runtime" semantic conventions. It represents the container runtime
+	// managing this container.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "docker", "containerd", "rkt"
+	ContainerRuntimeKey = attribute.Key("container.runtime")
+)
+
+// ContainerCommand returns an attribute KeyValue conforming to the
+// "container.command" semantic conventions. It represents the command used to
+// run the container (i.e. the command name).
+func ContainerCommand(val string) attribute.KeyValue {
+	return ContainerCommandKey.String(val)
+}
+
+// ContainerCommandArgs returns an attribute KeyValue conforming to the
+// "container.command_args" semantic conventions. It represents all the
+// command arguments (including the command/executable itself) run by the
+// container.
+func ContainerCommandArgs(val ...string) attribute.KeyValue {
+	return ContainerCommandArgsKey.StringSlice(val)
+}
+
+// ContainerCommandLine returns an attribute KeyValue conforming to the
+// "container.command_line" semantic conventions. It represents the full command
+// run by the container as a single string.
+func ContainerCommandLine(val string) attribute.KeyValue {
+	return ContainerCommandLineKey.String(val)
+}
+
+// ContainerCSIPluginName returns an attribute KeyValue conforming to the
+// "container.csi.plugin.name" semantic conventions. It represents the name of
+// the CSI ([Container Storage Interface]) plugin used by the volume.
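+//
+// Illustrative sketch (not part of the generated conventions): the CSI plugin
+// name and volume ID attributes are usually reported together for a volume,
+// e.g. on a span named span:
+//
+//	span.SetAttributes(
+//		ContainerCSIPluginName("pd.csi.storage.gke.io"),
+//		ContainerCSIVolumeID("projects/my-gcp-project/zones/my-gcp-zone/disks/my-gcp-disk"),
+//	)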
+//
+// [Container Storage Interface]: https://github.com/container-storage-interface/spec
+func ContainerCSIPluginName(val string) attribute.KeyValue {
+	return ContainerCSIPluginNameKey.String(val)
+}
+
+// ContainerCSIVolumeID returns an attribute KeyValue conforming to the
+// "container.csi.volume.id" semantic conventions. It represents the unique
+// volume ID returned by the CSI ([Container Storage Interface]) plugin.
+//
+// [Container Storage Interface]: https://github.com/container-storage-interface/spec
+func ContainerCSIVolumeID(val string) attribute.KeyValue {
+	return ContainerCSIVolumeIDKey.String(val)
+}
+
+// ContainerID returns an attribute KeyValue conforming to the "container.id"
+// semantic conventions. It represents the container ID. Usually a UUID, as for
+// example used to [identify Docker containers]. The UUID might be abbreviated.
+//
+// [identify Docker containers]: https://docs.docker.com/engine/containers/run/#container-identification
+func ContainerID(val string) attribute.KeyValue {
+	return ContainerIDKey.String(val)
+}
+
+// ContainerImageID returns an attribute KeyValue conforming to the
+// "container.image.id" semantic conventions. It represents the runtime specific
+// image identifier. Usually a hash algorithm followed by a UUID.
+func ContainerImageID(val string) attribute.KeyValue {
+	return ContainerImageIDKey.String(val)
+}
+
+// ContainerImageName returns an attribute KeyValue conforming to the
+// "container.image.name" semantic conventions. It represents the name of the
+// image the container was built on.
+func ContainerImageName(val string) attribute.KeyValue {
+	return ContainerImageNameKey.String(val)
+}
+
+// ContainerImageRepoDigests returns an attribute KeyValue conforming to the
+// "container.image.repo_digests" semantic conventions. It represents the repo
+// digests of the container image as provided by the container runtime.
+func ContainerImageRepoDigests(val ...string) attribute.KeyValue {
+	return ContainerImageRepoDigestsKey.StringSlice(val)
+}
+
+// ContainerImageTags returns an attribute KeyValue conforming to the
+// "container.image.tags" semantic conventions. It represents the container image
+// tags. An example can be found in [Docker Image Inspect]. Should be only the
+// `<tag>` section of the full name, for example from
+// `registry.example.com/my-org/my-image:<tag>`.
+//
+// [Docker Image Inspect]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect
+func ContainerImageTags(val ...string) attribute.KeyValue {
+	return ContainerImageTagsKey.StringSlice(val)
+}
+
+// ContainerName returns an attribute KeyValue conforming to the "container.name"
+// semantic conventions. It represents the container name used by container
+// runtime.
+func ContainerName(val string) attribute.KeyValue {
+	return ContainerNameKey.String(val)
+}
+
+// ContainerRuntime returns an attribute KeyValue conforming to the
+// "container.runtime" semantic conventions. It represents the container runtime
+// managing this container.
+func ContainerRuntime(val string) attribute.KeyValue {
+	return ContainerRuntimeKey.String(val)
+}
+
+// Namespace: cpu
+const (
+	// CPULogicalNumberKey is the attribute Key conforming to the
+	// "cpu.logical_number" semantic conventions. It represents the logical CPU
+	// number [0..n-1].
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + CPULogicalNumberKey = attribute.Key("cpu.logical_number") + + // CPUModeKey is the attribute Key conforming to the "cpu.mode" semantic + // conventions. It represents the mode of the CPU. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "user", "system" + CPUModeKey = attribute.Key("cpu.mode") +) + +// CPULogicalNumber returns an attribute KeyValue conforming to the +// "cpu.logical_number" semantic conventions. It represents the logical CPU +// number [0..n-1]. +func CPULogicalNumber(val int) attribute.KeyValue { + return CPULogicalNumberKey.Int(val) +} + +// Enum values for cpu.mode +var ( + // user + // Stability: development + CPUModeUser = CPUModeKey.String("user") + // system + // Stability: development + CPUModeSystem = CPUModeKey.String("system") + // nice + // Stability: development + CPUModeNice = CPUModeKey.String("nice") + // idle + // Stability: development + CPUModeIdle = CPUModeKey.String("idle") + // iowait + // Stability: development + CPUModeIOWait = CPUModeKey.String("iowait") + // interrupt + // Stability: development + CPUModeInterrupt = CPUModeKey.String("interrupt") + // steal + // Stability: development + CPUModeSteal = CPUModeKey.String("steal") + // kernel + // Stability: development + CPUModeKernel = CPUModeKey.String("kernel") +) + +// Namespace: db +const ( + // DBClientConnectionPoolNameKey is the attribute Key conforming to the + // "db.client.connection.pool.name" semantic conventions. It represents the name + // of the connection pool; unique within the instrumented application. In case + // the connection pool implementation doesn't provide a name, instrumentation + // SHOULD use a combination of parameters that would make the name unique, for + // example, combining attributes `server.address`, `server.port`, and + // `db.namespace`, formatted as `server.address:server.port/db.namespace`. + // Instrumentations that generate connection pool name following different + // patterns SHOULD document it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myDataSource" + DBClientConnectionPoolNameKey = attribute.Key("db.client.connection.pool.name") + + // DBClientConnectionStateKey is the attribute Key conforming to the + // "db.client.connection.state" semantic conventions. It represents the state of + // a connection in the pool. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "idle" + DBClientConnectionStateKey = attribute.Key("db.client.connection.state") + + // DBCollectionNameKey is the attribute Key conforming to the + // "db.collection.name" semantic conventions. It represents the name of a + // collection (table, container) within the database. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "public.users", "customers" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // The collection name SHOULD NOT be extracted from `db.query.text`, + // when the database system supports query text with multiple collections + // in non-batch operations. + // + // For batch operations, if the individual operations are known to have the same + // collection name then that collection name SHOULD be used. 
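+	//
+	// Illustrative usage sketch (not part of the generated conventions; assumes
+	// a database client span named span):
+	//
+	//	span.SetAttributes(
+	//		DBCollectionNameKey.String("public.users"),
+	//	)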
+ DBCollectionNameKey = attribute.Key("db.collection.name") + + // DBNamespaceKey is the attribute Key conforming to the "db.namespace" semantic + // conventions. It represents the name of the database, fully qualified within + // the server address and port. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "customers", "test.users" + // Note: If a database system has multiple namespace components, they SHOULD be + // concatenated from the most general to the most specific namespace component, + // using `|` as a separator between the components. Any missing components (and + // their associated separators) SHOULD be omitted. + // Semantic conventions for individual database systems SHOULD document what + // `db.namespace` means in the context of that system. + // It is RECOMMENDED to capture the value as provided by the application without + // attempting to do any case normalization. + DBNamespaceKey = attribute.Key("db.namespace") + + // DBOperationBatchSizeKey is the attribute Key conforming to the + // "db.operation.batch.size" semantic conventions. It represents the number of + // queries included in a batch operation. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 2, 3, 4 + // Note: Operations are only considered batches when they contain two or more + // operations, and so `db.operation.batch.size` SHOULD never be `1`. + DBOperationBatchSizeKey = attribute.Key("db.operation.batch.size") + + // DBOperationNameKey is the attribute Key conforming to the "db.operation.name" + // semantic conventions. It represents the name of the operation or command + // being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "findAndModify", "HMSET", "SELECT" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // The operation name SHOULD NOT be extracted from `db.query.text`, + // when the database system supports query text with multiple operations + // in non-batch operations. + // + // If spaces can occur in the operation name, multiple consecutive spaces + // SHOULD be normalized to a single space. + // + // For batch operations, if the individual operations are known to have the same + // operation name + // then that operation name SHOULD be used prepended by `BATCH `, + // otherwise `db.operation.name` SHOULD be `BATCH` or some other database + // system specific term if more applicable. + DBOperationNameKey = attribute.Key("db.operation.name") + + // DBQuerySummaryKey is the attribute Key conforming to the "db.query.summary" + // semantic conventions. It represents the low cardinality summary of a database + // query. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SELECT wuser_table", "INSERT shipping_details SELECT orders", "get + // user by id" + // Note: The query summary describes a class of database queries and is useful + // as a grouping key, especially when analyzing telemetry for database + // calls involving complex queries. + // + // Summary may be available to the instrumentation through + // instrumentation hooks or other means. If it is not available, + // instrumentations + // that support query parsing SHOULD generate a summary following + // [Generating query summary] + // section. 
+ // + // [Generating query summary]: /docs/database/database-spans.md#generating-a-summary-of-the-query + DBQuerySummaryKey = attribute.Key("db.query.summary") + + // DBQueryTextKey is the attribute Key conforming to the "db.query.text" + // semantic conventions. It represents the database query being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SELECT * FROM wuser_table where username = ?", "SET mykey ?" + // Note: For sanitization see [Sanitization of `db.query.text`]. + // For batch operations, if the individual operations are known to have the same + // query text then that query text SHOULD be used, otherwise all of the + // individual query texts SHOULD be concatenated with separator `; ` or some + // other database system specific separator if more applicable. + // Parameterized query text SHOULD NOT be sanitized. Even though parameterized + // query text can potentially have sensitive data, by using a parameterized + // query the user is giving a strong signal that any sensitive data will be + // passed as parameter values, and the benefit to observability of capturing the + // static part of the query text by default outweighs the risk. + // + // [Sanitization of `db.query.text`]: /docs/database/database-spans.md#sanitization-of-dbquerytext + DBQueryTextKey = attribute.Key("db.query.text") + + // DBResponseReturnedRowsKey is the attribute Key conforming to the + // "db.response.returned_rows" semantic conventions. It represents the number of + // rows returned by the operation. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10, 30, 1000 + DBResponseReturnedRowsKey = attribute.Key("db.response.returned_rows") + + // DBResponseStatusCodeKey is the attribute Key conforming to the + // "db.response.status_code" semantic conventions. It represents the database + // response status code. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "102", "ORA-17002", "08P01", "404" + // Note: The status code returned by the database. Usually it represents an + // error code, but may also represent partial success, warning, or differentiate + // between various types of successful outcomes. + // Semantic conventions for individual database systems SHOULD document what + // `db.response.status_code` means in the context of that system. + DBResponseStatusCodeKey = attribute.Key("db.response.status_code") + + // DBStoredProcedureNameKey is the attribute Key conforming to the + // "db.stored_procedure.name" semantic conventions. It represents the name of a + // stored procedure within the database. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "GetCustomer" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // For batch operations, if the individual operations are known to have the same + // stored procedure name then that stored procedure name SHOULD be used. + DBStoredProcedureNameKey = attribute.Key("db.stored_procedure.name") + + // DBSystemNameKey is the attribute Key conforming to the "db.system.name" + // semantic conventions. It represents the database management system (DBMS) + // product as identified by the client instrumentation. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: + // Note: The actual DBMS may differ from the one identified by the client. For + // example, when using PostgreSQL client libraries to connect to a CockroachDB, + // the `db.system.name` is set to `postgresql` based on the instrumentation's + // best knowledge. + DBSystemNameKey = attribute.Key("db.system.name") +) + +// DBClientConnectionPoolName returns an attribute KeyValue conforming to the +// "db.client.connection.pool.name" semantic conventions. It represents the name +// of the connection pool; unique within the instrumented application. In case +// the connection pool implementation doesn't provide a name, instrumentation +// SHOULD use a combination of parameters that would make the name unique, for +// example, combining attributes `server.address`, `server.port`, and +// `db.namespace`, formatted as `server.address:server.port/db.namespace`. +// Instrumentations that generate connection pool name following different +// patterns SHOULD document it. +func DBClientConnectionPoolName(val string) attribute.KeyValue { + return DBClientConnectionPoolNameKey.String(val) +} + +// DBCollectionName returns an attribute KeyValue conforming to the +// "db.collection.name" semantic conventions. It represents the name of a +// collection (table, container) within the database. +func DBCollectionName(val string) attribute.KeyValue { + return DBCollectionNameKey.String(val) +} + +// DBNamespace returns an attribute KeyValue conforming to the "db.namespace" +// semantic conventions. It represents the name of the database, fully qualified +// within the server address and port. +func DBNamespace(val string) attribute.KeyValue { + return DBNamespaceKey.String(val) +} + +// DBOperationBatchSize returns an attribute KeyValue conforming to the +// "db.operation.batch.size" semantic conventions. It represents the number of +// queries included in a batch operation. +func DBOperationBatchSize(val int) attribute.KeyValue { + return DBOperationBatchSizeKey.Int(val) +} + +// DBOperationName returns an attribute KeyValue conforming to the +// "db.operation.name" semantic conventions. It represents the name of the +// operation or command being executed. +func DBOperationName(val string) attribute.KeyValue { + return DBOperationNameKey.String(val) +} + +// DBQuerySummary returns an attribute KeyValue conforming to the +// "db.query.summary" semantic conventions. It represents the low cardinality +// summary of a database query. +func DBQuerySummary(val string) attribute.KeyValue { + return DBQuerySummaryKey.String(val) +} + +// DBQueryText returns an attribute KeyValue conforming to the "db.query.text" +// semantic conventions. It represents the database query being executed. +func DBQueryText(val string) attribute.KeyValue { + return DBQueryTextKey.String(val) +} + +// DBResponseReturnedRows returns an attribute KeyValue conforming to the +// "db.response.returned_rows" semantic conventions. It represents the number of +// rows returned by the operation. +func DBResponseReturnedRows(val int) attribute.KeyValue { + return DBResponseReturnedRowsKey.Int(val) +} + +// DBResponseStatusCode returns an attribute KeyValue conforming to the +// "db.response.status_code" semantic conventions. It represents the database +// response status code. 
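+//
+// Illustrative sketch (not part of the generated conventions): on a failed
+// query, instrumentation might record, for example:
+//
+//	span.SetAttributes(DBResponseStatusCode("ORA-17002"))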
+func DBResponseStatusCode(val string) attribute.KeyValue { + return DBResponseStatusCodeKey.String(val) +} + +// DBStoredProcedureName returns an attribute KeyValue conforming to the +// "db.stored_procedure.name" semantic conventions. It represents the name of a +// stored procedure within the database. +func DBStoredProcedureName(val string) attribute.KeyValue { + return DBStoredProcedureNameKey.String(val) +} + +// Enum values for db.client.connection.state +var ( + // idle + // Stability: development + DBClientConnectionStateIdle = DBClientConnectionStateKey.String("idle") + // used + // Stability: development + DBClientConnectionStateUsed = DBClientConnectionStateKey.String("used") +) + +// Enum values for db.system.name +var ( + // Some other SQL database. Fallback only. + // Stability: development + DBSystemNameOtherSQL = DBSystemNameKey.String("other_sql") + // [Adabas (Adaptable Database System)] + // Stability: development + // + // [Adabas (Adaptable Database System)]: https://documentation.softwareag.com/?pf=adabas + DBSystemNameSoftwareagAdabas = DBSystemNameKey.String("softwareag.adabas") + // [Actian Ingres] + // Stability: development + // + // [Actian Ingres]: https://www.actian.com/databases/ingres/ + DBSystemNameActianIngres = DBSystemNameKey.String("actian.ingres") + // [Amazon DynamoDB] + // Stability: development + // + // [Amazon DynamoDB]: https://aws.amazon.com/pm/dynamodb/ + DBSystemNameAWSDynamoDB = DBSystemNameKey.String("aws.dynamodb") + // [Amazon Redshift] + // Stability: development + // + // [Amazon Redshift]: https://aws.amazon.com/redshift/ + DBSystemNameAWSRedshift = DBSystemNameKey.String("aws.redshift") + // [Azure Cosmos DB] + // Stability: development + // + // [Azure Cosmos DB]: https://learn.microsoft.com/azure/cosmos-db + DBSystemNameAzureCosmosDB = DBSystemNameKey.String("azure.cosmosdb") + // [InterSystems Caché] + // Stability: development + // + // [InterSystems Caché]: https://www.intersystems.com/products/cache/ + DBSystemNameIntersystemsCache = DBSystemNameKey.String("intersystems.cache") + // [Apache Cassandra] + // Stability: development + // + // [Apache Cassandra]: https://cassandra.apache.org/ + DBSystemNameCassandra = DBSystemNameKey.String("cassandra") + // [ClickHouse] + // Stability: development + // + // [ClickHouse]: https://clickhouse.com/ + DBSystemNameClickHouse = DBSystemNameKey.String("clickhouse") + // [CockroachDB] + // Stability: development + // + // [CockroachDB]: https://www.cockroachlabs.com/ + DBSystemNameCockroachDB = DBSystemNameKey.String("cockroachdb") + // [Couchbase] + // Stability: development + // + // [Couchbase]: https://www.couchbase.com/ + DBSystemNameCouchbase = DBSystemNameKey.String("couchbase") + // [Apache CouchDB] + // Stability: development + // + // [Apache CouchDB]: https://couchdb.apache.org/ + DBSystemNameCouchDB = DBSystemNameKey.String("couchdb") + // [Apache Derby] + // Stability: development + // + // [Apache Derby]: https://db.apache.org/derby/ + DBSystemNameDerby = DBSystemNameKey.String("derby") + // [Elasticsearch] + // Stability: development + // + // [Elasticsearch]: https://www.elastic.co/elasticsearch + DBSystemNameElasticsearch = DBSystemNameKey.String("elasticsearch") + // [Firebird] + // Stability: development + // + // [Firebird]: https://www.firebirdsql.org/ + DBSystemNameFirebirdSQL = DBSystemNameKey.String("firebirdsql") + // [Google Cloud Spanner] + // Stability: development + // + // [Google Cloud Spanner]: https://cloud.google.com/spanner + DBSystemNameGCPSpanner = 
DBSystemNameKey.String("gcp.spanner") + // [Apache Geode] + // Stability: development + // + // [Apache Geode]: https://geode.apache.org/ + DBSystemNameGeode = DBSystemNameKey.String("geode") + // [H2 Database] + // Stability: development + // + // [H2 Database]: https://h2database.com/ + DBSystemNameH2database = DBSystemNameKey.String("h2database") + // [Apache HBase] + // Stability: development + // + // [Apache HBase]: https://hbase.apache.org/ + DBSystemNameHBase = DBSystemNameKey.String("hbase") + // [Apache Hive] + // Stability: development + // + // [Apache Hive]: https://hive.apache.org/ + DBSystemNameHive = DBSystemNameKey.String("hive") + // [HyperSQL Database] + // Stability: development + // + // [HyperSQL Database]: https://hsqldb.org/ + DBSystemNameHSQLDB = DBSystemNameKey.String("hsqldb") + // [IBM Db2] + // Stability: development + // + // [IBM Db2]: https://www.ibm.com/db2 + DBSystemNameIBMDB2 = DBSystemNameKey.String("ibm.db2") + // [IBM Informix] + // Stability: development + // + // [IBM Informix]: https://www.ibm.com/products/informix + DBSystemNameIBMInformix = DBSystemNameKey.String("ibm.informix") + // [IBM Netezza] + // Stability: development + // + // [IBM Netezza]: https://www.ibm.com/products/netezza + DBSystemNameIBMNetezza = DBSystemNameKey.String("ibm.netezza") + // [InfluxDB] + // Stability: development + // + // [InfluxDB]: https://www.influxdata.com/ + DBSystemNameInfluxDB = DBSystemNameKey.String("influxdb") + // [Instant] + // Stability: development + // + // [Instant]: https://www.instantdb.com/ + DBSystemNameInstantDB = DBSystemNameKey.String("instantdb") + // [MariaDB] + // Stability: stable + // + // [MariaDB]: https://mariadb.org/ + DBSystemNameMariaDB = DBSystemNameKey.String("mariadb") + // [Memcached] + // Stability: development + // + // [Memcached]: https://memcached.org/ + DBSystemNameMemcached = DBSystemNameKey.String("memcached") + // [MongoDB] + // Stability: development + // + // [MongoDB]: https://www.mongodb.com/ + DBSystemNameMongoDB = DBSystemNameKey.String("mongodb") + // [Microsoft SQL Server] + // Stability: stable + // + // [Microsoft SQL Server]: https://www.microsoft.com/sql-server + DBSystemNameMicrosoftSQLServer = DBSystemNameKey.String("microsoft.sql_server") + // [MySQL] + // Stability: stable + // + // [MySQL]: https://www.mysql.com/ + DBSystemNameMySQL = DBSystemNameKey.String("mysql") + // [Neo4j] + // Stability: development + // + // [Neo4j]: https://neo4j.com/ + DBSystemNameNeo4j = DBSystemNameKey.String("neo4j") + // [OpenSearch] + // Stability: development + // + // [OpenSearch]: https://opensearch.org/ + DBSystemNameOpenSearch = DBSystemNameKey.String("opensearch") + // [Oracle Database] + // Stability: development + // + // [Oracle Database]: https://www.oracle.com/database/ + DBSystemNameOracleDB = DBSystemNameKey.String("oracle.db") + // [PostgreSQL] + // Stability: stable + // + // [PostgreSQL]: https://www.postgresql.org/ + DBSystemNamePostgreSQL = DBSystemNameKey.String("postgresql") + // [Redis] + // Stability: development + // + // [Redis]: https://redis.io/ + DBSystemNameRedis = DBSystemNameKey.String("redis") + // [SAP HANA] + // Stability: development + // + // [SAP HANA]: https://www.sap.com/products/technology-platform/hana/what-is-sap-hana.html + DBSystemNameSAPHANA = DBSystemNameKey.String("sap.hana") + // [SAP MaxDB] + // Stability: development + // + // [SAP MaxDB]: https://maxdb.sap.com/ + DBSystemNameSAPMaxDB = DBSystemNameKey.String("sap.maxdb") + // [SQLite] + // Stability: development + // + // 
[SQLite]: https://www.sqlite.org/ + DBSystemNameSQLite = DBSystemNameKey.String("sqlite") + // [Teradata] + // Stability: development + // + // [Teradata]: https://www.teradata.com/ + DBSystemNameTeradata = DBSystemNameKey.String("teradata") + // [Trino] + // Stability: development + // + // [Trino]: https://trino.io/ + DBSystemNameTrino = DBSystemNameKey.String("trino") +) + +// Namespace: deployment +const ( + // DeploymentEnvironmentNameKey is the attribute Key conforming to the + // "deployment.environment.name" semantic conventions. It represents the name of + // the [deployment environment] (aka deployment tier). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "staging", "production" + // Note: `deployment.environment.name` does not affect the uniqueness + // constraints defined through + // the `service.namespace`, `service.name` and `service.instance.id` resource + // attributes. + // This implies that resources carrying the following attribute combinations + // MUST be + // considered to be identifying the same service: + // + // - `service.name=frontend`, `deployment.environment.name=production` + // - `service.name=frontend`, `deployment.environment.name=staging`. + // + // + // [deployment environment]: https://wikipedia.org/wiki/Deployment_environment + DeploymentEnvironmentNameKey = attribute.Key("deployment.environment.name") + + // DeploymentIDKey is the attribute Key conforming to the "deployment.id" + // semantic conventions. It represents the id of the deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1208" + DeploymentIDKey = attribute.Key("deployment.id") + + // DeploymentNameKey is the attribute Key conforming to the "deployment.name" + // semantic conventions. It represents the name of the deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "deploy my app", "deploy-frontend" + DeploymentNameKey = attribute.Key("deployment.name") + + // DeploymentStatusKey is the attribute Key conforming to the + // "deployment.status" semantic conventions. It represents the status of the + // deployment. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + DeploymentStatusKey = attribute.Key("deployment.status") +) + +// DeploymentEnvironmentName returns an attribute KeyValue conforming to the +// "deployment.environment.name" semantic conventions. It represents the name of +// the [deployment environment] (aka deployment tier). +// +// [deployment environment]: https://wikipedia.org/wiki/Deployment_environment +func DeploymentEnvironmentName(val string) attribute.KeyValue { + return DeploymentEnvironmentNameKey.String(val) +} + +// DeploymentID returns an attribute KeyValue conforming to the "deployment.id" +// semantic conventions. It represents the id of the deployment. +func DeploymentID(val string) attribute.KeyValue { + return DeploymentIDKey.String(val) +} + +// DeploymentName returns an attribute KeyValue conforming to the +// "deployment.name" semantic conventions. It represents the name of the +// deployment. 
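+//
+// A hedged usage sketch (not generated): deployment events often combine these
+// attributes, e.g.:
+//
+//	attrs := []attribute.KeyValue{
+//		DeploymentName("deploy-frontend"),
+//		DeploymentID("1208"),
+//		DeploymentStatusSucceeded,
+//	}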
+func DeploymentName(val string) attribute.KeyValue {
+	return DeploymentNameKey.String(val)
+}
+
+// Enum values for deployment.status
+var (
+	// failed
+	// Stability: development
+	DeploymentStatusFailed = DeploymentStatusKey.String("failed")
+	// succeeded
+	// Stability: development
+	DeploymentStatusSucceeded = DeploymentStatusKey.String("succeeded")
+)
+
+// Namespace: destination
+const (
+	// DestinationAddressKey is the attribute Key conforming to the
+	// "destination.address" semantic conventions. It represents the destination
+	// address - domain name if available without reverse DNS lookup; otherwise, IP
+	// address or Unix domain socket name.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "destination.example.com", "10.1.2.80", "/tmp/my.sock"
+	// Note: When observed from the source side, and when communicating through an
+	// intermediary, `destination.address` SHOULD represent the destination address
+	// behind any intermediaries, for example proxies, if it's available.
+	DestinationAddressKey = attribute.Key("destination.address")
+
+	// DestinationPortKey is the attribute Key conforming to the "destination.port"
+	// semantic conventions. It represents the destination port number.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 3389, 2888
+	DestinationPortKey = attribute.Key("destination.port")
+)
+
+// DestinationAddress returns an attribute KeyValue conforming to the
+// "destination.address" semantic conventions. It represents the destination
+// address - domain name if available without reverse DNS lookup; otherwise, IP
+// address or Unix domain socket name.
+func DestinationAddress(val string) attribute.KeyValue {
+	return DestinationAddressKey.String(val)
+}
+
+// DestinationPort returns an attribute KeyValue conforming to the
+// "destination.port" semantic conventions. It represents the destination port
+// number.
+func DestinationPort(val int) attribute.KeyValue {
+	return DestinationPortKey.Int(val)
+}
+
+// Namespace: device
+const (
+	// DeviceIDKey is the attribute Key conforming to the "device.id" semantic
+	// conventions. It represents a unique identifier representing the device.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "123456789012345", "01:23:45:67:89:AB"
+	// Note: Its value SHOULD be identical for all apps on a device and it SHOULD
+	// NOT change if an app is uninstalled and re-installed.
+	// However, it might be resettable by the user for all apps on a device.
+	// Hardware IDs (e.g. vendor-specific serial number, IMEI or MAC address) MAY be
+	// used as values.
+	//
+	// More information about Android identifier best practices can be found
+	// [here].
+	//
+	// > [!WARNING]
+	// > This attribute may contain sensitive (PII) information. Caution
+	// > should be taken when storing personal data or anything which can identify a
+	// > user. GDPR and data protection laws may apply; ensure you do your own due
+	// > diligence.
+	// >
+	// > Due to these reasons, this identifier is not recommended for consumer
+	// > applications and will likely result in rejection from both Google Play and
+	// > App Store. However, it may be appropriate for specific enterprise scenarios,
+	// > such as kiosk devices or enterprise-managed devices, with appropriate
+	// > compliance clearance.
+ // > Any instrumentation providing this identifier MUST implement it as an + // > opt-in feature. + // > See [`app.installation.id`] for a more + // > privacy-preserving alternative. + // + // [here]: https://developer.android.com/training/articles/user-data-ids + // [`app.installation.id`]: /docs/registry/attributes/app.md#app-installation-id + DeviceIDKey = attribute.Key("device.id") + + // DeviceManufacturerKey is the attribute Key conforming to the + // "device.manufacturer" semantic conventions. It represents the name of the + // device manufacturer. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Apple", "Samsung" + // Note: The Android OS provides this field via [Build]. iOS apps SHOULD + // hardcode the value `Apple`. + // + // [Build]: https://developer.android.com/reference/android/os/Build#MANUFACTURER + DeviceManufacturerKey = attribute.Key("device.manufacturer") + + // DeviceModelIdentifierKey is the attribute Key conforming to the + // "device.model.identifier" semantic conventions. It represents the model + // identifier for the device. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iPhone3,4", "SM-G920F" + // Note: It's recommended this value represents a machine-readable version of + // the model identifier rather than the market or consumer-friendly name of the + // device. + DeviceModelIdentifierKey = attribute.Key("device.model.identifier") + + // DeviceModelNameKey is the attribute Key conforming to the "device.model.name" + // semantic conventions. It represents the marketing name for the device model. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iPhone 6s Plus", "Samsung Galaxy S6" + // Note: It's recommended this value represents a human-readable version of the + // device model rather than a machine-readable alternative. + DeviceModelNameKey = attribute.Key("device.model.name") +) + +// DeviceID returns an attribute KeyValue conforming to the "device.id" semantic +// conventions. It represents a unique identifier representing the device. +func DeviceID(val string) attribute.KeyValue { + return DeviceIDKey.String(val) +} + +// DeviceManufacturer returns an attribute KeyValue conforming to the +// "device.manufacturer" semantic conventions. It represents the name of the +// device manufacturer. +func DeviceManufacturer(val string) attribute.KeyValue { + return DeviceManufacturerKey.String(val) +} + +// DeviceModelIdentifier returns an attribute KeyValue conforming to the +// "device.model.identifier" semantic conventions. It represents the model +// identifier for the device. +func DeviceModelIdentifier(val string) attribute.KeyValue { + return DeviceModelIdentifierKey.String(val) +} + +// DeviceModelName returns an attribute KeyValue conforming to the +// "device.model.name" semantic conventions. It represents the marketing name for +// the device model. +func DeviceModelName(val string) attribute.KeyValue { + return DeviceModelNameKey.String(val) +} + +// Namespace: disk +const ( + // DiskIODirectionKey is the attribute Key conforming to the "disk.io.direction" + // semantic conventions. It represents the disk IO operation direction.
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "read" + DiskIODirectionKey = attribute.Key("disk.io.direction") +) + +// Enum values for disk.io.direction +var ( + // read + // Stability: development + DiskIODirectionRead = DiskIODirectionKey.String("read") + // write + // Stability: development + DiskIODirectionWrite = DiskIODirectionKey.String("write") +) + +// Namespace: dns +const ( + // DNSQuestionNameKey is the attribute Key conforming to the "dns.question.name" + // semantic conventions. It represents the name being queried. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "www.example.com", "opentelemetry.io" + // Note: If the name field contains non-printable characters (below 32 or above + // 126), those characters should be represented as escaped base 10 integers + // (\DDD). Back slashes and quotes should be escaped. Tabs, carriage returns, + // and line feeds should be converted to \t, \r, and \n respectively. + DNSQuestionNameKey = attribute.Key("dns.question.name") +) + +// DNSQuestionName returns an attribute KeyValue conforming to the +// "dns.question.name" semantic conventions. It represents the name being +// queried. +func DNSQuestionName(val string) attribute.KeyValue { + return DNSQuestionNameKey.String(val) +} + +// Namespace: elasticsearch +const ( + // ElasticsearchNodeNameKey is the attribute Key conforming to the + // "elasticsearch.node.name" semantic conventions. It represents + // the human-readable identifier of the node/instance to which a request was + // routed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "instance-0000000001" + ElasticsearchNodeNameKey = attribute.Key("elasticsearch.node.name") +) + +// ElasticsearchNodeName returns an attribute KeyValue conforming to the +// "elasticsearch.node.name" semantic conventions. It represents +// the human-readable identifier of the node/instance to which a request was +// routed. +func ElasticsearchNodeName(val string) attribute.KeyValue { + return ElasticsearchNodeNameKey.String(val) +} + +// Namespace: enduser +const ( + // EnduserIDKey is the attribute Key conforming to the "enduser.id" semantic + // conventions. It represents the unique identifier of an end user in the + // system. It may be a username, email address, or other identifier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "username" + // Note: Unique identifier of an end user in the system. + // + // > [!Warning] + // > This field contains sensitive (PII) information. + EnduserIDKey = attribute.Key("enduser.id") + + // EnduserPseudoIDKey is the attribute Key conforming to the "enduser.pseudo.id" + // semantic conventions. It represents the pseudonymous identifier of an end + // user. This identifier should be a random value that is not directly linked or + // associated with the end user's actual identity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "QdH5CAWJgqVT4rOr0qtumf" + // Note: Pseudonymous identifier of an end user. + // + // > [!Warning] + // > This field contains sensitive (linkable PII) information. + EnduserPseudoIDKey = attribute.Key("enduser.pseudo.id") +) + +// EnduserID returns an attribute KeyValue conforming to the "enduser.id" +// semantic conventions.
It represents the unique identifier of an end user in +// the system. It may be a username, email address, or other identifier. +func EnduserID(val string) attribute.KeyValue { + return EnduserIDKey.String(val) +} + +// EnduserPseudoID returns an attribute KeyValue conforming to the +// "enduser.pseudo.id" semantic conventions. It represents the pseudonymous +// identifier of an end user. This identifier should be a random value that is +// not directly linked or associated with the end user's actual identity. +func EnduserPseudoID(val string) attribute.KeyValue { + return EnduserPseudoIDKey.String(val) +} + +// Namespace: error +const ( + // ErrorMessageKey is the attribute Key conforming to the "error.message" + // semantic conventions. It represents a message providing more detail about an + // error in human-readable form. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Unexpected input type: string", "The user has exceeded their + // storage quota" + // Note: `error.message` should provide additional context and detail about an + // error. + // It is NOT RECOMMENDED to duplicate the value of `error.type` in + // `error.message`. + // It is also NOT RECOMMENDED to duplicate the value of `exception.message` in + // `error.message`. + // + // `error.message` is NOT RECOMMENDED for metrics or spans due to its unbounded + // cardinality and overlap with span status. + ErrorMessageKey = attribute.Key("error.message") + + // ErrorTypeKey is the attribute Key conforming to the "error.type" semantic + // conventions. It describes a class of error the operation ended + // with. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "timeout", "java.net.UnknownHostException", + // "server_certificate_invalid", "500" + // Note: The `error.type` SHOULD be predictable, and SHOULD have low + // cardinality. + // + // When `error.type` is set to a type (e.g., an exception type), its + // canonical class name identifying the type within the artifact SHOULD be used. + // + // Instrumentations SHOULD document the list of errors they report. + // + // The cardinality of `error.type` within one instrumentation library SHOULD be + // low. + // Telemetry consumers that aggregate data from multiple instrumentation + // libraries and applications + // should be prepared for `error.type` to have high cardinality at query time + // when no + // additional filters are applied. + // + // If the operation has completed successfully, instrumentations SHOULD NOT set + // `error.type`. + // + // If a specific domain defines its own set of error identifiers (such as HTTP + // or gRPC status codes), + // it's RECOMMENDED to: + // + // - Use a domain-specific attribute + // - Set `error.type` to capture all errors, regardless of whether they are + // defined within the domain-specific set or not. + ErrorTypeKey = attribute.Key("error.type") +) + +// ErrorMessage returns an attribute KeyValue conforming to the "error.message" +// semantic conventions. It represents a message providing more detail about an +// error in human-readable form. +func ErrorMessage(val string) attribute.KeyValue { + return ErrorMessageKey.String(val) +} + +// Enum values for error.type +var ( + // A fallback error value to be used when the instrumentation doesn't define a + // custom value.
+ // + // Stability: stable + ErrorTypeOther = ErrorTypeKey.String("_OTHER") +) + +// Namespace: exception +const ( + // ExceptionMessageKey is the attribute Key conforming to the + // "exception.message" semantic conventions. It represents the exception + // message. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "Division by zero", "Can't convert 'int' object to str implicitly" + ExceptionMessageKey = attribute.Key("exception.message") + + // ExceptionStacktraceKey is the attribute Key conforming to the + // "exception.stacktrace" semantic conventions. It represents a stacktrace as a + // string in the natural representation for the language runtime. The + // representation is to be determined and documented by each language SIG. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: Exception in thread "main" java.lang.RuntimeException: Test + // exception\n at com.example.GenerateTrace.methodB(GenerateTrace.java:13)\n at + // com.example.GenerateTrace.methodA(GenerateTrace.java:9)\n at + // com.example.GenerateTrace.main(GenerateTrace.java:5) + ExceptionStacktraceKey = attribute.Key("exception.stacktrace") + + // ExceptionTypeKey is the attribute Key conforming to the "exception.type" + // semantic conventions. It represents the type of the exception (its + // fully-qualified class name, if applicable). The dynamic type of the exception + // should be preferred over the static type in languages that support it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "java.net.ConnectException", "OSError" + ExceptionTypeKey = attribute.Key("exception.type") +) + +// ExceptionMessage returns an attribute KeyValue conforming to the +// "exception.message" semantic conventions. It represents the exception message. +func ExceptionMessage(val string) attribute.KeyValue { + return ExceptionMessageKey.String(val) +} + +// ExceptionStacktrace returns an attribute KeyValue conforming to the +// "exception.stacktrace" semantic conventions. It represents a stacktrace as a +// string in the natural representation for the language runtime. The +// representation is to be determined and documented by each language SIG. +func ExceptionStacktrace(val string) attribute.KeyValue { + return ExceptionStacktraceKey.String(val) +} + +// ExceptionType returns an attribute KeyValue conforming to the "exception.type" +// semantic conventions. It represents the type of the exception (its +// fully-qualified class name, if applicable). The dynamic type of the exception +// should be preferred over the static type in languages that support it. +func ExceptionType(val string) attribute.KeyValue { + return ExceptionTypeKey.String(val) +} + +// Namespace: faas +const ( + // FaaSColdstartKey is the attribute Key conforming to the "faas.coldstart" + // semantic conventions. It represents a boolean that is true if the serverless + // function is executed for the first time (aka cold-start). + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSColdstartKey = attribute.Key("faas.coldstart") + + // FaaSCronKey is the attribute Key conforming to the "faas.cron" semantic + // conventions. It represents a string containing the schedule period as + // [Cron Expression]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0/5 * * * ? 
* + // + // [Cron Expression]: https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm + FaaSCronKey = attribute.Key("faas.cron") + + // FaaSDocumentCollectionKey is the attribute Key conforming to the + // "faas.document.collection" semantic conventions. It represents the name of + // the source on which the triggering operation was performed. For example, in + // Cloud Storage or S3 this corresponds to the bucket name, and in Cosmos DB to the + // database name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myBucketName", "myDbName" + FaaSDocumentCollectionKey = attribute.Key("faas.document.collection") + + // FaaSDocumentNameKey is the attribute Key conforming to the + // "faas.document.name" semantic conventions. It represents the document + // name/table subjected to the operation. For example, in Cloud Storage or S3 it is + // the name of the file, and in Cosmos DB the table name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myFile.txt", "myTableName" + FaaSDocumentNameKey = attribute.Key("faas.document.name") + + // FaaSDocumentOperationKey is the attribute Key conforming to the + // "faas.document.operation" semantic conventions. It describes + // the type of the operation that was performed on the data. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSDocumentOperationKey = attribute.Key("faas.document.operation") + + // FaaSDocumentTimeKey is the attribute Key conforming to the + // "faas.document.time" semantic conventions. It represents a string containing + // the time when the data was accessed in the [ISO 8601] format expressed in + // [UTC]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 2020-01-23T13:47:06Z + // + // [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html + // [UTC]: https://www.w3.org/TR/NOTE-datetime + FaaSDocumentTimeKey = attribute.Key("faas.document.time") + + // FaaSInstanceKey is the attribute Key conforming to the "faas.instance" + // semantic conventions. It represents the execution environment ID as a string, + // that will be potentially reused for other invocations to the same + // function/function version. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021/06/28/[$LATEST]2f399eb14537447da05ab2a2e39309de" + // Note: - **AWS Lambda:** Use the (full) log stream name. + FaaSInstanceKey = attribute.Key("faas.instance") + + // FaaSInvocationIDKey is the attribute Key conforming to the + // "faas.invocation_id" semantic conventions. It represents the invocation ID of + // the current function invocation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: af9d5aa4-a685-4c5f-a22b-444f80b3cc28 + FaaSInvocationIDKey = attribute.Key("faas.invocation_id") + + // FaaSInvokedNameKey is the attribute Key conforming to the "faas.invoked_name" + // semantic conventions. It represents the name of the invoked function. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: my-function + // Note: SHOULD be equal to the `faas.name` resource attribute of the invoked + // function.
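+ //
+ // Editorial sketch (an assumption, not part of the generated file): on a
+ // client span modeling the outgoing invocation, where span is an existing
+ // trace.Span, these attributes can be set together, e.g.
+ //
+ //	span.SetAttributes(
+ //		FaaSInvokedName("my-function"),
+ //		FaaSInvokedRegion("eu-central-1"),
+ //	)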
+ FaaSInvokedNameKey = attribute.Key("faas.invoked_name") + + // FaaSInvokedProviderKey is the attribute Key conforming to the + // "faas.invoked_provider" semantic conventions. It represents the cloud + // provider of the invoked function. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: SHOULD be equal to the `cloud.provider` resource attribute of the + // invoked function. + FaaSInvokedProviderKey = attribute.Key("faas.invoked_provider") + + // FaaSInvokedRegionKey is the attribute Key conforming to the + // "faas.invoked_region" semantic conventions. It represents the cloud region of + // the invoked function. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: eu-central-1 + // Note: SHOULD be equal to the `cloud.region` resource attribute of the invoked + // function. + FaaSInvokedRegionKey = attribute.Key("faas.invoked_region") + + // FaaSMaxMemoryKey is the attribute Key conforming to the "faas.max_memory" + // semantic conventions. It represents the amount of memory available to the + // serverless function converted to Bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: It's recommended to set this attribute since e.g. too little memory can + // easily stop a Java AWS Lambda function from working correctly. On AWS Lambda, + // the environment variable `AWS_LAMBDA_FUNCTION_MEMORY_SIZE` provides this + // information (which must be multiplied by 1,048,576). + FaaSMaxMemoryKey = attribute.Key("faas.max_memory") + + // FaaSNameKey is the attribute Key conforming to the "faas.name" semantic + // conventions. It represents the name of the single function that this runtime + // instance executes. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-function", "myazurefunctionapp/some-function-name" + // Note: This is the name of the function as configured/deployed on the FaaS + // platform and is usually different from the name of the callback + // function (which may be stored in the + // [`code.namespace`/`code.function.name`] + // span attributes). + // + // For some cloud providers, the above definition is ambiguous. The following + // definition of function name MUST be used for this attribute + // (and consequently the span name) for the listed cloud providers/products: + // + // - **Azure:** The full name `<FUNCAPP>/<FUNC>`, i.e., function app name + // followed by a forward slash followed by the function name (this form + // can also be seen in the resource JSON for the function). + // This means that a span attribute MUST be used, as an Azure function + // app can host multiple functions that would usually share + // a TracerProvider (see also the `cloud.resource_id` attribute). + // + // + // [`code.namespace`/`code.function.name`]: /docs/general/attributes.md#source-code-attributes + FaaSNameKey = attribute.Key("faas.name") + + // FaaSTimeKey is the attribute Key conforming to the "faas.time" semantic + // conventions. It represents a string containing the function invocation time + // in the [ISO 8601] format expressed in [UTC].
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 2020-01-23T13:47:06Z + // + // [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html + // [UTC]: https://www.w3.org/TR/NOTE-datetime + FaaSTimeKey = attribute.Key("faas.time") + + // FaaSTriggerKey is the attribute Key conforming to the "faas.trigger" semantic + // conventions. It represents the type of the trigger which caused this function + // invocation. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSTriggerKey = attribute.Key("faas.trigger") + + // FaaSVersionKey is the attribute Key conforming to the "faas.version" semantic + // conventions. It represents the immutable version of the function being + // executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "26", "pinkfroid-00002" + // Note: Depending on the cloud provider and platform, use: + // + // - **AWS Lambda:** The [function version] + // (an integer represented as a decimal string). + // - **Google Cloud Run (Services):** The [revision] + // (i.e., the function name plus the revision suffix). + // - **Google Cloud Functions:** The value of the + // [`K_REVISION` environment variable]. + // - **Azure Functions:** Not applicable. Do not set this attribute. + // + // + // [function version]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-versions.html + // [revision]: https://cloud.google.com/run/docs/managing/revisions + // [`K_REVISION` environment variable]: https://cloud.google.com/functions/docs/env-var#runtime_environment_variables_set_automatically + FaaSVersionKey = attribute.Key("faas.version") +) + +// FaaSColdstart returns an attribute KeyValue conforming to the "faas.coldstart" +// semantic conventions. It represents a boolean that is true if the serverless +// function is executed for the first time (aka cold-start). +func FaaSColdstart(val bool) attribute.KeyValue { + return FaaSColdstartKey.Bool(val) +} + +// FaaSCron returns an attribute KeyValue conforming to the "faas.cron" semantic +// conventions. It represents a string containing the schedule period as +// [Cron Expression]. +// +// [Cron Expression]: https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm +func FaaSCron(val string) attribute.KeyValue { + return FaaSCronKey.String(val) +} + +// FaaSDocumentCollection returns an attribute KeyValue conforming to the +// "faas.document.collection" semantic conventions. It represents the name of the +// source on which the triggering operation was performed. For example, in Cloud +// Storage or S3 this corresponds to the bucket name, and in Cosmos DB to the database +// name. +func FaaSDocumentCollection(val string) attribute.KeyValue { + return FaaSDocumentCollectionKey.String(val) +} + +// FaaSDocumentName returns an attribute KeyValue conforming to the +// "faas.document.name" semantic conventions. It represents the document +// name/table subjected to the operation. For example, in Cloud Storage or S3 it is +// the name of the file, and in Cosmos DB the table name. +func FaaSDocumentName(val string) attribute.KeyValue { + return FaaSDocumentNameKey.String(val) +} + +// FaaSDocumentTime returns an attribute KeyValue conforming to the +// "faas.document.time" semantic conventions. It represents a string containing +// the time when the data was accessed in the [ISO 8601] format expressed in +// [UTC].
+// +// [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html +// [UTC]: https://www.w3.org/TR/NOTE-datetime +func FaaSDocumentTime(val string) attribute.KeyValue { + return FaaSDocumentTimeKey.String(val) +} + +// FaaSInstance returns an attribute KeyValue conforming to the "faas.instance" +// semantic conventions. It represents the execution environment ID as a string, +// that will be potentially reused for other invocations to the same +// function/function version. +func FaaSInstance(val string) attribute.KeyValue { + return FaaSInstanceKey.String(val) +} + +// FaaSInvocationID returns an attribute KeyValue conforming to the +// "faas.invocation_id" semantic conventions. It represents the invocation ID of +// the current function invocation. +func FaaSInvocationID(val string) attribute.KeyValue { + return FaaSInvocationIDKey.String(val) +} + +// FaaSInvokedName returns an attribute KeyValue conforming to the +// "faas.invoked_name" semantic conventions. It represents the name of the +// invoked function. +func FaaSInvokedName(val string) attribute.KeyValue { + return FaaSInvokedNameKey.String(val) +} + +// FaaSInvokedRegion returns an attribute KeyValue conforming to the +// "faas.invoked_region" semantic conventions. It represents the cloud region of +// the invoked function. +func FaaSInvokedRegion(val string) attribute.KeyValue { + return FaaSInvokedRegionKey.String(val) +} + +// FaaSMaxMemory returns an attribute KeyValue conforming to the +// "faas.max_memory" semantic conventions. It represents the amount of memory +// available to the serverless function converted to Bytes. +func FaaSMaxMemory(val int) attribute.KeyValue { + return FaaSMaxMemoryKey.Int(val) +} + +// FaaSName returns an attribute KeyValue conforming to the "faas.name" semantic +// conventions. It represents the name of the single function that this runtime +// instance executes. +func FaaSName(val string) attribute.KeyValue { + return FaaSNameKey.String(val) +} + +// FaaSTime returns an attribute KeyValue conforming to the "faas.time" semantic +// conventions. It represents a string containing the function invocation time in +// the [ISO 8601] format expressed in [UTC]. +// +// [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html +// [UTC]: https://www.w3.org/TR/NOTE-datetime +func FaaSTime(val string) attribute.KeyValue { + return FaaSTimeKey.String(val) +} + +// FaaSVersion returns an attribute KeyValue conforming to the "faas.version" +// semantic conventions. It represents the immutable version of the function +// being executed. +func FaaSVersion(val string) attribute.KeyValue { + return FaaSVersionKey.String(val) +} + +// Enum values for faas.document.operation +var ( + // When a new object is created. + // Stability: development + FaaSDocumentOperationInsert = FaaSDocumentOperationKey.String("insert") + // When an object is modified. + // Stability: development + FaaSDocumentOperationEdit = FaaSDocumentOperationKey.String("edit") + // When an object is deleted. 
+ // Stability: development + FaaSDocumentOperationDelete = FaaSDocumentOperationKey.String("delete") +) + +// Enum values for faas.invoked_provider +var ( + // Alibaba Cloud + // Stability: development + FaaSInvokedProviderAlibabaCloud = FaaSInvokedProviderKey.String("alibaba_cloud") + // Amazon Web Services + // Stability: development + FaaSInvokedProviderAWS = FaaSInvokedProviderKey.String("aws") + // Microsoft Azure + // Stability: development + FaaSInvokedProviderAzure = FaaSInvokedProviderKey.String("azure") + // Google Cloud Platform + // Stability: development + FaaSInvokedProviderGCP = FaaSInvokedProviderKey.String("gcp") + // Tencent Cloud + // Stability: development + FaaSInvokedProviderTencentCloud = FaaSInvokedProviderKey.String("tencent_cloud") +) + +// Enum values for faas.trigger +var ( + // A response to some data source operation such as a database or filesystem + // read/write + // Stability: development + FaaSTriggerDatasource = FaaSTriggerKey.String("datasource") + // To provide an answer to an inbound HTTP request + // Stability: development + FaaSTriggerHTTP = FaaSTriggerKey.String("http") + // A function is set to be executed when messages are sent to a messaging system + // Stability: development + FaaSTriggerPubSub = FaaSTriggerKey.String("pubsub") + // A function is scheduled to be executed regularly + // Stability: development + FaaSTriggerTimer = FaaSTriggerKey.String("timer") + // If none of the others apply + // Stability: development + FaaSTriggerOther = FaaSTriggerKey.String("other") +) + +// Namespace: feature_flag +const ( + // FeatureFlagContextIDKey is the attribute Key conforming to the + // "feature_flag.context.id" semantic conventions. It represents the unique + // identifier for the flag evaluation context. For example, the targeting key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "5157782b-2203-4c80-a857-dbbd5e7761db" + FeatureFlagContextIDKey = attribute.Key("feature_flag.context.id") + + // FeatureFlagKeyKey is the attribute Key conforming to the "feature_flag.key" + // semantic conventions. It represents the lookup key of the feature flag. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "logo-color" + FeatureFlagKeyKey = attribute.Key("feature_flag.key") + + // FeatureFlagProviderNameKey is the attribute Key conforming to the + // "feature_flag.provider.name" semantic conventions. It + // identifies the feature flag provider. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Flag Manager" + FeatureFlagProviderNameKey = attribute.Key("feature_flag.provider.name") + + // FeatureFlagResultReasonKey is the attribute Key conforming to the + // "feature_flag.result.reason" semantic conventions. It represents the reason + // code which shows how a feature flag value was determined. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "static", "targeting_match", "error", "default" + FeatureFlagResultReasonKey = attribute.Key("feature_flag.result.reason") + + // FeatureFlagResultValueKey is the attribute Key conforming to the + // "feature_flag.result.value" semantic conventions. It represents the evaluated + // value of the feature flag.
+ // + // Type: any + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "#ff0000", true, 3 + // Note: With some feature flag providers, feature flag results can be quite + // large or contain private or sensitive details. + // Because of this, `feature_flag.result.variant` is often the preferred + // attribute if it is available. + // + // It may be desirable to redact or otherwise limit the size and scope of + // `feature_flag.result.value` if possible. + // Because the evaluated flag value is unstructured and may be any type, it is + // left to the instrumentation author to determine how best to achieve this. + FeatureFlagResultValueKey = attribute.Key("feature_flag.result.value") + + // FeatureFlagResultVariantKey is the attribute Key conforming to the + // "feature_flag.result.variant" semantic conventions. It represents a semantic + // identifier for an evaluated flag value. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "red", "true", "on" + // Note: A semantic identifier, commonly referred to as a variant, provides a + // means + // for referring to a value without including the value itself. This can + // provide additional context for understanding the meaning behind a value. + // For example, the variant `red` may be used for the value `#c05543`. + FeatureFlagResultVariantKey = attribute.Key("feature_flag.result.variant") + + // FeatureFlagSetIDKey is the attribute Key conforming to the + // "feature_flag.set.id" semantic conventions. It represents the identifier of + // the [flag set] to which the feature flag belongs. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "proj-1", "ab98sgs", "service1/dev" + // + // [flag set]: https://openfeature.dev/specification/glossary/#flag-set + FeatureFlagSetIDKey = attribute.Key("feature_flag.set.id") + + // FeatureFlagVersionKey is the attribute Key conforming to the + // "feature_flag.version" semantic conventions. It represents the version of the + // ruleset used during the evaluation. This may be any stable value which + // uniquely identifies the ruleset. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1", "01ABCDEF" + FeatureFlagVersionKey = attribute.Key("feature_flag.version") +) + +// FeatureFlagContextID returns an attribute KeyValue conforming to the +// "feature_flag.context.id" semantic conventions. It represents the unique +// identifier for the flag evaluation context. For example, the targeting key. +func FeatureFlagContextID(val string) attribute.KeyValue { + return FeatureFlagContextIDKey.String(val) +} + +// FeatureFlagKey returns an attribute KeyValue conforming to the +// "feature_flag.key" semantic conventions. It represents the lookup key of the +// feature flag. +func FeatureFlagKey(val string) attribute.KeyValue { + return FeatureFlagKeyKey.String(val) +} + +// FeatureFlagProviderName returns an attribute KeyValue conforming to the +// "feature_flag.provider.name" semantic conventions. It +// identifies the feature flag provider. +func FeatureFlagProviderName(val string) attribute.KeyValue { + return FeatureFlagProviderNameKey.String(val) +} + +// FeatureFlagResultVariant returns an attribute KeyValue conforming to the +// "feature_flag.result.variant" semantic conventions. It represents a semantic +// identifier for an evaluated flag value.
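+//
+// A hedged illustration (editorial, not generated): when recording a flag
+// evaluation on an existing span, the variant is usually set alongside the
+// flag key, e.g.
+//
+//	span.SetAttributes(
+//		FeatureFlagKey("logo-color"),
+//		FeatureFlagResultVariant("red"),
+//	)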
+func FeatureFlagResultVariant(val string) attribute.KeyValue { + return FeatureFlagResultVariantKey.String(val) +} + +// FeatureFlagSetID returns an attribute KeyValue conforming to the +// "feature_flag.set.id" semantic conventions. It represents the identifier of +// the [flag set] to which the feature flag belongs. +// +// [flag set]: https://openfeature.dev/specification/glossary/#flag-set +func FeatureFlagSetID(val string) attribute.KeyValue { + return FeatureFlagSetIDKey.String(val) +} + +// FeatureFlagVersion returns an attribute KeyValue conforming to the +// "feature_flag.version" semantic conventions. It represents the version of the +// ruleset used during the evaluation. This may be any stable value which +// uniquely identifies the ruleset. +func FeatureFlagVersion(val string) attribute.KeyValue { + return FeatureFlagVersionKey.String(val) +} + +// Enum values for feature_flag.result.reason +var ( + // The resolved value is static (no dynamic evaluation). + // Stability: development + FeatureFlagResultReasonStatic = FeatureFlagResultReasonKey.String("static") + // The resolved value fell back to a pre-configured value (no dynamic evaluation + // occurred or dynamic evaluation yielded no result). + // Stability: development + FeatureFlagResultReasonDefault = FeatureFlagResultReasonKey.String("default") + // The resolved value was the result of a dynamic evaluation, such as a rule or + // specific user-targeting. + // Stability: development + FeatureFlagResultReasonTargetingMatch = FeatureFlagResultReasonKey.String("targeting_match") + // The resolved value was the result of pseudorandom assignment. + // Stability: development + FeatureFlagResultReasonSplit = FeatureFlagResultReasonKey.String("split") + // The resolved value was retrieved from cache. + // Stability: development + FeatureFlagResultReasonCached = FeatureFlagResultReasonKey.String("cached") + // The resolved value was the result of the flag being disabled in the + // management system. + // Stability: development + FeatureFlagResultReasonDisabled = FeatureFlagResultReasonKey.String("disabled") + // The reason for the resolved value could not be determined. + // Stability: development + FeatureFlagResultReasonUnknown = FeatureFlagResultReasonKey.String("unknown") + // The resolved value is non-authoritative or possibly out of date + // Stability: development + FeatureFlagResultReasonStale = FeatureFlagResultReasonKey.String("stale") + // The resolved value was the result of an error. + // Stability: development + FeatureFlagResultReasonError = FeatureFlagResultReasonKey.String("error") +) + +// Namespace: file +const ( + // FileAccessedKey is the attribute Key conforming to the "file.accessed" + // semantic conventions. It represents the time when the file was last accessed, + // in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: This attribute might not be supported by some file systems — NFS, + // FAT32, in embedded OS, etc. + FileAccessedKey = attribute.Key("file.accessed") + + // FileAttributesKey is the attribute Key conforming to the "file.attributes" + // semantic conventions. It represents the array of file attributes. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "readonly", "hidden" + // Note: Attribute names depend on the OS or file system.
Here’s a + // non-exhaustive list of values expected for this attribute: `archive`, + // `compressed`, `directory`, `encrypted`, `execute`, `hidden`, `immutable`, + // `journaled`, `read`, `readonly`, `symbolic link`, `system`, `temporary`, + // `write`. + FileAttributesKey = attribute.Key("file.attributes") + + // FileChangedKey is the attribute Key conforming to the "file.changed" semantic + // conventions. It represents the time when the file attributes or metadata was + // last changed, in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: `file.changed` captures the time when any of the file's properties or + // attributes (including the content) are changed, while `file.modified` + // captures the timestamp when the file content is modified. + FileChangedKey = attribute.Key("file.changed") + + // FileCreatedKey is the attribute Key conforming to the "file.created" semantic + // conventions. It represents the time when the file was created, in ISO 8601 + // format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: This attribute might not be supported by some file systems — NFS, + // FAT32, in embedded OS, etc. + FileCreatedKey = attribute.Key("file.created") + + // FileDirectoryKey is the attribute Key conforming to the "file.directory" + // semantic conventions. It represents the directory where the file is located. + // It should include the drive letter, when appropriate. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/home/user", "C:\Program Files\MyApp" + FileDirectoryKey = attribute.Key("file.directory") + + // FileExtensionKey is the attribute Key conforming to the "file.extension" + // semantic conventions. It represents the file extension, excluding the leading + // dot. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "png", "gz" + // Note: When the file name has multiple extensions (example.tar.gz), only the + // last one should be captured ("gz", not "tar.gz"). + FileExtensionKey = attribute.Key("file.extension") + + // FileForkNameKey is the attribute Key conforming to the "file.fork_name" + // semantic conventions. It represents the name of the fork. A fork is + // additional data associated with a filesystem object. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Zone.Identifier" + // Note: On Linux, a resource fork is used to store additional data with a + // filesystem object. A file always has at least one fork for the data portion, + // and additional forks may exist. + // On NTFS, this is analogous to an Alternate Data Stream (ADS), and the default + // data stream for a file is just called $DATA. Zone.Identifier is commonly used + // by Windows to track contents downloaded from the Internet. An ADS is + // typically of the form: C:\path\to\filename.extension:some_fork_name, and + // some_fork_name is the value that should populate `fork_name`. + // `filename.extension` should populate `file.name`, and `extension` should + // populate `file.extension`. The full path, `file.path`, will include the fork + // name. + FileForkNameKey = attribute.Key("file.fork_name") + + // FileGroupIDKey is the attribute Key conforming to the "file.group.id" + // semantic conventions.
It represents the primary Group ID (GID) of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1000" + FileGroupIDKey = attribute.Key("file.group.id") + + // FileGroupNameKey is the attribute Key conforming to the "file.group.name" + // semantic conventions. It represents the primary group name of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "users" + FileGroupNameKey = attribute.Key("file.group.name") + + // FileInodeKey is the attribute Key conforming to the "file.inode" semantic + // conventions. It represents the inode representing the file in the filesystem. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "256383" + FileInodeKey = attribute.Key("file.inode") + + // FileModeKey is the attribute Key conforming to the "file.mode" semantic + // conventions. It represents the mode of the file in octal representation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0640" + FileModeKey = attribute.Key("file.mode") + + // FileModifiedKey is the attribute Key conforming to the "file.modified" + // semantic conventions. It represents the time when the file content was last + // modified, in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + FileModifiedKey = attribute.Key("file.modified") + + // FileNameKey is the attribute Key conforming to the "file.name" semantic + // conventions. It represents the name of the file including the extension, + // without the directory. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "example.png" + FileNameKey = attribute.Key("file.name") + + // FileOwnerIDKey is the attribute Key conforming to the "file.owner.id" + // semantic conventions. It represents the user ID (UID) or security identifier + // (SID) of the file owner. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1000" + FileOwnerIDKey = attribute.Key("file.owner.id") + + // FileOwnerNameKey is the attribute Key conforming to the "file.owner.name" + // semantic conventions. It represents the username of the file owner. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + FileOwnerNameKey = attribute.Key("file.owner.name") + + // FilePathKey is the attribute Key conforming to the "file.path" semantic + // conventions. It represents the full path to the file, including the file + // name. It should include the drive letter, when appropriate. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/home/alice/example.png", "C:\Program Files\MyApp\myapp.exe" + FilePathKey = attribute.Key("file.path") + + // FileSizeKey is the attribute Key conforming to the "file.size" semantic + // conventions. It represents the file size in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FileSizeKey = attribute.Key("file.size") + + // FileSymbolicLinkTargetPathKey is the attribute Key conforming to the + // "file.symbolic_link.target_path" semantic conventions. It represents the path + // to the target of a symbolic link. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/usr/bin/python3" + // Note: This attribute is only applicable to symbolic links. + FileSymbolicLinkTargetPathKey = attribute.Key("file.symbolic_link.target_path") +) + +// FileAccessed returns an attribute KeyValue conforming to the "file.accessed" +// semantic conventions. It represents the time when the file was last accessed, +// in ISO 8601 format. +func FileAccessed(val string) attribute.KeyValue { + return FileAccessedKey.String(val) +} + +// FileAttributes returns an attribute KeyValue conforming to the +// "file.attributes" semantic conventions. It represents the array of file +// attributes. +func FileAttributes(val ...string) attribute.KeyValue { + return FileAttributesKey.StringSlice(val) +} + +// FileChanged returns an attribute KeyValue conforming to the "file.changed" +// semantic conventions. It represents the time when the file attributes or +// metadata was last changed, in ISO 8601 format. +func FileChanged(val string) attribute.KeyValue { + return FileChangedKey.String(val) +} + +// FileCreated returns an attribute KeyValue conforming to the "file.created" +// semantic conventions. It represents the time when the file was created, in ISO +// 8601 format. +func FileCreated(val string) attribute.KeyValue { + return FileCreatedKey.String(val) +} + +// FileDirectory returns an attribute KeyValue conforming to the "file.directory" +// semantic conventions. It represents the directory where the file is located. +// It should include the drive letter, when appropriate. +func FileDirectory(val string) attribute.KeyValue { + return FileDirectoryKey.String(val) +} + +// FileExtension returns an attribute KeyValue conforming to the "file.extension" +// semantic conventions. It represents the file extension, excluding the leading +// dot. +func FileExtension(val string) attribute.KeyValue { + return FileExtensionKey.String(val) +} + +// FileForkName returns an attribute KeyValue conforming to the "file.fork_name" +// semantic conventions. It represents the name of the fork. A fork is additional +// data associated with a filesystem object. +func FileForkName(val string) attribute.KeyValue { + return FileForkNameKey.String(val) +} + +// FileGroupID returns an attribute KeyValue conforming to the "file.group.id" +// semantic conventions. It represents the primary Group ID (GID) of the file. +func FileGroupID(val string) attribute.KeyValue { + return FileGroupIDKey.String(val) +} + +// FileGroupName returns an attribute KeyValue conforming to the +// "file.group.name" semantic conventions. It represents the primary group name +// of the file. +func FileGroupName(val string) attribute.KeyValue { + return FileGroupNameKey.String(val) +} + +// FileInode returns an attribute KeyValue conforming to the "file.inode" +// semantic conventions. It represents the inode representing the file in the +// filesystem. +func FileInode(val string) attribute.KeyValue { + return FileInodeKey.String(val) +} + +// FileMode returns an attribute KeyValue conforming to the "file.mode" semantic +// conventions. It represents the mode of the file in octal representation. +func FileMode(val string) attribute.KeyValue { + return FileModeKey.String(val) +} + +// FileModified returns an attribute KeyValue conforming to the "file.modified" +// semantic conventions. It represents the time when the file content was last +// modified, in ISO 8601 format. 
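+//
+// Illustrative only (an editorial sketch, not part of the generated
+// conventions): file attributes are commonly combined when describing a file
+// event, e.g.
+//
+//	attrs := []attribute.KeyValue{
+//		FilePath("/home/alice/example.png"),
+//		FileName("example.png"),
+//		FileModified("2021-01-01T12:00:00Z"),
+//	}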
+func FileModified(val string) attribute.KeyValue { + return FileModifiedKey.String(val) +} + +// FileName returns an attribute KeyValue conforming to the "file.name" semantic +// conventions. It represents the name of the file including the extension, +// without the directory. +func FileName(val string) attribute.KeyValue { + return FileNameKey.String(val) +} + +// FileOwnerID returns an attribute KeyValue conforming to the "file.owner.id" +// semantic conventions. It represents the user ID (UID) or security identifier +// (SID) of the file owner. +func FileOwnerID(val string) attribute.KeyValue { + return FileOwnerIDKey.String(val) +} + +// FileOwnerName returns an attribute KeyValue conforming to the +// "file.owner.name" semantic conventions. It represents the username of the file +// owner. +func FileOwnerName(val string) attribute.KeyValue { + return FileOwnerNameKey.String(val) +} + +// FilePath returns an attribute KeyValue conforming to the "file.path" semantic +// conventions. It represents the full path to the file, including the file name. +// It should include the drive letter, when appropriate. +func FilePath(val string) attribute.KeyValue { + return FilePathKey.String(val) +} + +// FileSize returns an attribute KeyValue conforming to the "file.size" semantic +// conventions. It represents the file size in bytes. +func FileSize(val int) attribute.KeyValue { + return FileSizeKey.Int(val) +} + +// FileSymbolicLinkTargetPath returns an attribute KeyValue conforming to the +// "file.symbolic_link.target_path" semantic conventions. It represents the path +// to the target of a symbolic link. +func FileSymbolicLinkTargetPath(val string) attribute.KeyValue { + return FileSymbolicLinkTargetPathKey.String(val) +} + +// Namespace: gcp +const ( + // GCPAppHubApplicationContainerKey is the attribute Key conforming to the + // "gcp.apphub.application.container" semantic conventions. It represents the + // container within GCP where the AppHub application is defined. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "projects/my-container-project" + GCPAppHubApplicationContainerKey = attribute.Key("gcp.apphub.application.container") + + // GCPAppHubApplicationIDKey is the attribute Key conforming to the + // "gcp.apphub.application.id" semantic conventions. It represents the name of + // the application as configured in AppHub. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-application" + GCPAppHubApplicationIDKey = attribute.Key("gcp.apphub.application.id") + + // GCPAppHubApplicationLocationKey is the attribute Key conforming to the + // "gcp.apphub.application.location" semantic conventions. It represents the GCP + // zone or region where the application is defined. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "us-central1" + GCPAppHubApplicationLocationKey = attribute.Key("gcp.apphub.application.location") + + // GCPAppHubServiceCriticalityTypeKey is the attribute Key conforming to the + // "gcp.apphub.service.criticality_type" semantic conventions. It represents the + // criticality of a service, which indicates its importance to the business.
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: [See AppHub type enum] + // + // [See AppHub type enum]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type + GCPAppHubServiceCriticalityTypeKey = attribute.Key("gcp.apphub.service.criticality_type") + + // GCPAppHubServiceEnvironmentTypeKey is the attribute Key conforming to the + // "gcp.apphub.service.environment_type" semantic conventions. It represents the + // environment of a service, which is the stage of a software lifecycle. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: [See AppHub environment type] + // + // [See AppHub environment type]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type_1 + GCPAppHubServiceEnvironmentTypeKey = attribute.Key("gcp.apphub.service.environment_type") + + // GCPAppHubServiceIDKey is the attribute Key conforming to the + // "gcp.apphub.service.id" semantic conventions. It represents the name of the + // service as configured in AppHub. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-service" + GCPAppHubServiceIDKey = attribute.Key("gcp.apphub.service.id") + + // GCPAppHubWorkloadCriticalityTypeKey is the attribute Key conforming to the + // "gcp.apphub.workload.criticality_type" semantic conventions. It represents + // the criticality of a workload, which indicates its importance to the business. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: [See AppHub type enum] + // + // [See AppHub type enum]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type + GCPAppHubWorkloadCriticalityTypeKey = attribute.Key("gcp.apphub.workload.criticality_type") + + // GCPAppHubWorkloadEnvironmentTypeKey is the attribute Key conforming to the + // "gcp.apphub.workload.environment_type" semantic conventions. It represents + // the environment of a workload, which is the stage of a software lifecycle. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: [See AppHub environment type] + // + // [See AppHub environment type]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type_1 + GCPAppHubWorkloadEnvironmentTypeKey = attribute.Key("gcp.apphub.workload.environment_type") + + // GCPAppHubWorkloadIDKey is the attribute Key conforming to the + // "gcp.apphub.workload.id" semantic conventions. It represents the name of the + // workload as configured in AppHub. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-workload" + GCPAppHubWorkloadIDKey = attribute.Key("gcp.apphub.workload.id") + + // GCPClientServiceKey is the attribute Key conforming to the + // "gcp.client.service" semantic conventions. It identifies the + // Google Cloud service for which the official client library is intended. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "appengine", "run", "firestore", "alloydb", "spanner" + // Note: Intended to be a stable identifier for Google Cloud client libraries + // that is uniform across implementation languages. The value should be derived + // from the canonical service domain for the service; for example, + // 'foo.googleapis.com' should result in a value of 'foo'.
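+ //
+ // As an editorial illustration (not part of the generated file): following
+ // the derivation rule above, a client library whose canonical service
+ // domain is spanner.googleapis.com would report
+ //
+ //	GCPClientService("spanner")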
+ GCPClientServiceKey = attribute.Key("gcp.client.service") + + // GCPCloudRunJobExecutionKey is the attribute Key conforming to the + // "gcp.cloud_run.job.execution" semantic conventions. It represents the name of + // the Cloud Run [execution] being run for the Job, as set by the + // [`CLOUD_RUN_EXECUTION`] environment variable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "job-name-xxxx", "sample-job-mdw84" + // + // [execution]: https://cloud.google.com/run/docs/managing/job-executions + // [`CLOUD_RUN_EXECUTION`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars + GCPCloudRunJobExecutionKey = attribute.Key("gcp.cloud_run.job.execution") + + // GCPCloudRunJobTaskIndexKey is the attribute Key conforming to the + // "gcp.cloud_run.job.task_index" semantic conventions. It represents the index + // for a task within an execution as provided by the [`CLOUD_RUN_TASK_INDEX`] + // environment variable. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 1 + // + // [`CLOUD_RUN_TASK_INDEX`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars + GCPCloudRunJobTaskIndexKey = attribute.Key("gcp.cloud_run.job.task_index") + + // GCPGCEInstanceHostnameKey is the attribute Key conforming to the + // "gcp.gce.instance.hostname" semantic conventions. It represents the hostname + // of a GCE instance. This is the full value of the default or [custom hostname]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-host1234.example.com", + // "sample-vm.us-west1-b.c.my-project.internal" + // + // [custom hostname]: https://cloud.google.com/compute/docs/instances/custom-hostname-vm + GCPGCEInstanceHostnameKey = attribute.Key("gcp.gce.instance.hostname") + + // GCPGCEInstanceNameKey is the attribute Key conforming to the + // "gcp.gce.instance.name" semantic conventions. It represents the instance name + // of a GCE instance. This is the value provided by `host.name`, the visible + // name of the instance in the Cloud Console UI, and the prefix for the default + // hostname of the instance as defined by the [default internal DNS name]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "instance-1", "my-vm-name" + // + // [default internal DNS name]: https://cloud.google.com/compute/docs/internal-dns#instance-fully-qualified-domain-names + GCPGCEInstanceNameKey = attribute.Key("gcp.gce.instance.name") +) + +// GCPAppHubApplicationContainer returns an attribute KeyValue conforming to the +// "gcp.apphub.application.container" semantic conventions. It represents the +// container within GCP where the AppHub application is defined. +func GCPAppHubApplicationContainer(val string) attribute.KeyValue { + return GCPAppHubApplicationContainerKey.String(val) +} + +// GCPAppHubApplicationID returns an attribute KeyValue conforming to the +// "gcp.apphub.application.id" semantic conventions. It represents the name of +// the application as configured in AppHub. +func GCPAppHubApplicationID(val string) attribute.KeyValue { + return GCPAppHubApplicationIDKey.String(val) +} + +// GCPAppHubApplicationLocation returns an attribute KeyValue conforming to the +// "gcp.apphub.application.location" semantic conventions. It represents the GCP +// zone or region where the application is defined.
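+//
+// Editorial usage sketch (assumed, not generated): the three AppHub
+// application attributes are typically emitted together on a span, e.g.
+//
+//	span.SetAttributes(
+//		GCPAppHubApplicationContainer("projects/my-container-project"),
+//		GCPAppHubApplicationID("my-application"),
+//		GCPAppHubApplicationLocation("us-central1"),
+//	)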
+func GCPAppHubApplicationLocation(val string) attribute.KeyValue {
+	return GCPAppHubApplicationLocationKey.String(val)
+}
+
+// GCPAppHubServiceID returns an attribute KeyValue conforming to the
+// "gcp.apphub.service.id" semantic conventions. It represents the name of the
+// service as configured in AppHub.
+func GCPAppHubServiceID(val string) attribute.KeyValue {
+	return GCPAppHubServiceIDKey.String(val)
+}
+
+// GCPAppHubWorkloadID returns an attribute KeyValue conforming to the
+// "gcp.apphub.workload.id" semantic conventions. It represents the name of the
+// workload as configured in AppHub.
+func GCPAppHubWorkloadID(val string) attribute.KeyValue {
+	return GCPAppHubWorkloadIDKey.String(val)
+}
+
+// GCPClientService returns an attribute KeyValue conforming to the
+// "gcp.client.service" semantic conventions. It identifies the Google Cloud
+// service for which the official client library is intended.
+func GCPClientService(val string) attribute.KeyValue {
+	return GCPClientServiceKey.String(val)
+}
+
+// GCPCloudRunJobExecution returns an attribute KeyValue conforming to the
+// "gcp.cloud_run.job.execution" semantic conventions. It represents the name of
+// the Cloud Run [execution] being run for the Job, as set by the
+// [`CLOUD_RUN_EXECUTION`] environment variable.
+//
+// [execution]: https://cloud.google.com/run/docs/managing/job-executions
+// [`CLOUD_RUN_EXECUTION`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars
+func GCPCloudRunJobExecution(val string) attribute.KeyValue {
+	return GCPCloudRunJobExecutionKey.String(val)
+}
+
+// GCPCloudRunJobTaskIndex returns an attribute KeyValue conforming to the
+// "gcp.cloud_run.job.task_index" semantic conventions. It represents the index
+// for a task within an execution as provided by the [`CLOUD_RUN_TASK_INDEX`]
+// environment variable.
+//
+// [`CLOUD_RUN_TASK_INDEX`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars
+func GCPCloudRunJobTaskIndex(val int) attribute.KeyValue {
+	return GCPCloudRunJobTaskIndexKey.Int(val)
+}
+
+// GCPGCEInstanceHostname returns an attribute KeyValue conforming to the
+// "gcp.gce.instance.hostname" semantic conventions. It represents the hostname
+// of a GCE instance. This is the full value of the default or
+// [custom hostname].
+//
+// [custom hostname]: https://cloud.google.com/compute/docs/instances/custom-hostname-vm
+func GCPGCEInstanceHostname(val string) attribute.KeyValue {
+	return GCPGCEInstanceHostnameKey.String(val)
+}
+
+// GCPGCEInstanceName returns an attribute KeyValue conforming to the
+// "gcp.gce.instance.name" semantic conventions. It represents the instance name
+// of a GCE instance. This is the value provided by `host.name`, the visible name
+// of the instance in the Cloud Console UI, and the prefix for the default
+// hostname of the instance as defined by the [default internal DNS name].
+//
+// [default internal DNS name]: https://cloud.google.com/compute/docs/internal-dns#instance-fully-qualified-domain-names
+func GCPGCEInstanceName(val string) attribute.KeyValue {
+	return GCPGCEInstanceNameKey.String(val)
+}
+
+// Enum values for gcp.apphub.service.criticality_type
+var (
+	// Mission critical service.
+	// Stability: development
+	GCPAppHubServiceCriticalityTypeMissionCritical = GCPAppHubServiceCriticalityTypeKey.String("MISSION_CRITICAL")
+	// High impact.
+	// Stability: development
+	GCPAppHubServiceCriticalityTypeHigh = GCPAppHubServiceCriticalityTypeKey.String("HIGH")
+	// Medium impact.
+	// Stability: development
+	GCPAppHubServiceCriticalityTypeMedium = GCPAppHubServiceCriticalityTypeKey.String("MEDIUM")
+	// Low impact.
+	// Stability: development
+	GCPAppHubServiceCriticalityTypeLow = GCPAppHubServiceCriticalityTypeKey.String("LOW")
+)
+
+// Enum values for gcp.apphub.service.environment_type
+var (
+	// Production environment.
+	// Stability: development
+	GCPAppHubServiceEnvironmentTypeProduction = GCPAppHubServiceEnvironmentTypeKey.String("PRODUCTION")
+	// Staging environment.
+	// Stability: development
+	GCPAppHubServiceEnvironmentTypeStaging = GCPAppHubServiceEnvironmentTypeKey.String("STAGING")
+	// Test environment.
+	// Stability: development
+	GCPAppHubServiceEnvironmentTypeTest = GCPAppHubServiceEnvironmentTypeKey.String("TEST")
+	// Development environment.
+	// Stability: development
+	GCPAppHubServiceEnvironmentTypeDevelopment = GCPAppHubServiceEnvironmentTypeKey.String("DEVELOPMENT")
+)
+
+// Enum values for gcp.apphub.workload.criticality_type
+var (
+	// Mission critical workload.
+	// Stability: development
+	GCPAppHubWorkloadCriticalityTypeMissionCritical = GCPAppHubWorkloadCriticalityTypeKey.String("MISSION_CRITICAL")
+	// High impact.
+	// Stability: development
+	GCPAppHubWorkloadCriticalityTypeHigh = GCPAppHubWorkloadCriticalityTypeKey.String("HIGH")
+	// Medium impact.
+	// Stability: development
+	GCPAppHubWorkloadCriticalityTypeMedium = GCPAppHubWorkloadCriticalityTypeKey.String("MEDIUM")
+	// Low impact.
+	// Stability: development
+	GCPAppHubWorkloadCriticalityTypeLow = GCPAppHubWorkloadCriticalityTypeKey.String("LOW")
+)
+
+// Enum values for gcp.apphub.workload.environment_type
+var (
+	// Production environment.
+	// Stability: development
+	GCPAppHubWorkloadEnvironmentTypeProduction = GCPAppHubWorkloadEnvironmentTypeKey.String("PRODUCTION")
+	// Staging environment.
+	// Stability: development
+	GCPAppHubWorkloadEnvironmentTypeStaging = GCPAppHubWorkloadEnvironmentTypeKey.String("STAGING")
+	// Test environment.
+	// Stability: development
+	GCPAppHubWorkloadEnvironmentTypeTest = GCPAppHubWorkloadEnvironmentTypeKey.String("TEST")
+	// Development environment.
+	// Stability: development
+	GCPAppHubWorkloadEnvironmentTypeDevelopment = GCPAppHubWorkloadEnvironmentTypeKey.String("DEVELOPMENT")
+)
+
+// Namespace: gen_ai
+const (
+	// GenAIAgentDescriptionKey is the attribute Key conforming to the
+	// "gen_ai.agent.description" semantic conventions. It represents the free-form
+	// description of the GenAI agent provided by the application.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Helps with math problems", "Generates fiction stories"
+	GenAIAgentDescriptionKey = attribute.Key("gen_ai.agent.description")
+
+	// GenAIAgentIDKey is the attribute Key conforming to the "gen_ai.agent.id"
+	// semantic conventions. It represents the unique identifier of the GenAI agent.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "asst_5j66UpCpwteGg4YSxUnt7lPY"
+	GenAIAgentIDKey = attribute.Key("gen_ai.agent.id")
+
+	// GenAIAgentNameKey is the attribute Key conforming to the "gen_ai.agent.name"
+	// semantic conventions. It represents the human-readable name of the GenAI
+	// agent provided by the application.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Math Tutor", "Fiction Writer"
+	GenAIAgentNameKey = attribute.Key("gen_ai.agent.name")
+
+	// GenAIConversationIDKey is the attribute Key conforming to the
+	// "gen_ai.conversation.id" semantic conventions. It represents the unique
+	// identifier for a conversation (session, thread), used to store and correlate
+	// messages within this conversation.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "conv_5j66UpCpwteGg4YSxUnt7lPY"
+	GenAIConversationIDKey = attribute.Key("gen_ai.conversation.id")
+
+	// GenAIDataSourceIDKey is the attribute Key conforming to the
+	// "gen_ai.data_source.id" semantic conventions. It represents the data source
+	// identifier.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "H7STPQYOND"
+	// Note: Data sources are used by AI agents and RAG applications to store
+	// grounding data. A data source may be an external database, object store,
+	// document collection, website, or any other storage system used by the GenAI
+	// agent or application. The `gen_ai.data_source.id` SHOULD match the identifier
+	// used by the GenAI system rather than a name specific to the external storage,
+	// such as a database or object store. Semantic conventions referencing
+	// `gen_ai.data_source.id` MAY also leverage additional attributes, such as
+	// `db.*`, to further identify and describe the data source.
+	GenAIDataSourceIDKey = attribute.Key("gen_ai.data_source.id")
+
+	// GenAIOpenAIRequestServiceTierKey is the attribute Key conforming to the
+	// "gen_ai.openai.request.service_tier" semantic conventions. It represents the
+	// service tier requested. May be a specific tier, default, or auto.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "auto", "default"
+	GenAIOpenAIRequestServiceTierKey = attribute.Key("gen_ai.openai.request.service_tier")
+
+	// GenAIOpenAIResponseServiceTierKey is the attribute Key conforming to the
+	// "gen_ai.openai.response.service_tier" semantic conventions. It represents the
+	// service tier used for the response.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "scale", "default"
+	GenAIOpenAIResponseServiceTierKey = attribute.Key("gen_ai.openai.response.service_tier")
+
+	// GenAIOpenAIResponseSystemFingerprintKey is the attribute Key conforming to
+	// the "gen_ai.openai.response.system_fingerprint" semantic conventions. It
+	// represents a fingerprint to track any eventual change in the Generative AI
+	// environment.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "fp_44709d6fcb"
+	GenAIOpenAIResponseSystemFingerprintKey = attribute.Key("gen_ai.openai.response.system_fingerprint")
+
+	// GenAIOperationNameKey is the attribute Key conforming to the
+	// "gen_ai.operation.name" semantic conventions. It represents the name of the
+	// operation being performed.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	// Note: If one of the predefined values applies, but the specific system uses
+	// a different name, it's RECOMMENDED to document it in the semantic conventions
+	// for the specific GenAI system and use the system-specific name in the
+	// instrumentation. If a different name is not documented, instrumentation
+	// libraries SHOULD use the applicable predefined value.
+	GenAIOperationNameKey = attribute.Key("gen_ai.operation.name")
+
+	// GenAIOutputTypeKey is the attribute Key conforming to the
+	// "gen_ai.output.type" semantic conventions. It represents the content type
+	// requested by the client.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	// Note: This attribute SHOULD be used when the client requests output of a
+	// specific type. The model may return zero or more outputs of this type.
+	// This attribute specifies the output modality and not the actual output
+	// format. For example, if an image is requested, the actual output could be a
+	// URL pointing to an image file.
+	// Additional output format details may be recorded in the future in the
+	// `gen_ai.output.{type}.*` attributes.
+	GenAIOutputTypeKey = attribute.Key("gen_ai.output.type")
+
+	// GenAIRequestChoiceCountKey is the attribute Key conforming to the
+	// "gen_ai.request.choice.count" semantic conventions. It represents the target
+	// number of candidate completions to return.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 3
+	GenAIRequestChoiceCountKey = attribute.Key("gen_ai.request.choice.count")
+
+	// GenAIRequestEncodingFormatsKey is the attribute Key conforming to the
+	// "gen_ai.request.encoding_formats" semantic conventions. It represents the
+	// encoding formats requested in an embeddings operation, if specified.
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: ["base64"], ["float", "binary"]
+	// Note: In some GenAI systems the encoding formats are called embedding types.
+	// Also, some GenAI systems only accept a single format per request.
+	GenAIRequestEncodingFormatsKey = attribute.Key("gen_ai.request.encoding_formats")
+
+	// GenAIRequestFrequencyPenaltyKey is the attribute Key conforming to the
+	// "gen_ai.request.frequency_penalty" semantic conventions. It represents the
+	// frequency penalty setting for the GenAI request.
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 0.1
+	GenAIRequestFrequencyPenaltyKey = attribute.Key("gen_ai.request.frequency_penalty")
+
+	// GenAIRequestMaxTokensKey is the attribute Key conforming to the
+	// "gen_ai.request.max_tokens" semantic conventions. It represents the maximum
+	// number of tokens the model generates for a request.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 100
+	GenAIRequestMaxTokensKey = attribute.Key("gen_ai.request.max_tokens")
+
+	// GenAIRequestModelKey is the attribute Key conforming to the
+	// "gen_ai.request.model" semantic conventions. It represents the name of the
+	// GenAI model a request is being made to.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "gpt-4"
+	GenAIRequestModelKey = attribute.Key("gen_ai.request.model")
+
+	// GenAIRequestPresencePenaltyKey is the attribute Key conforming to the
+	// "gen_ai.request.presence_penalty" semantic conventions. It represents the
+	// presence penalty setting for the GenAI request.
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 0.1
+	GenAIRequestPresencePenaltyKey = attribute.Key("gen_ai.request.presence_penalty")
+
+	// GenAIRequestSeedKey is the attribute Key conforming to the
+	// "gen_ai.request.seed" semantic conventions. It represents the request seed;
+	// requests with the same seed value are more likely to return the same result.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 100
+	GenAIRequestSeedKey = attribute.Key("gen_ai.request.seed")
+
+	// GenAIRequestStopSequencesKey is the attribute Key conforming to the
+	// "gen_ai.request.stop_sequences" semantic conventions. It represents the list
+	// of sequences that the model will use to stop generating further tokens.
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "forest", "lived"
+	GenAIRequestStopSequencesKey = attribute.Key("gen_ai.request.stop_sequences")
+
+	// GenAIRequestTemperatureKey is the attribute Key conforming to the
+	// "gen_ai.request.temperature" semantic conventions. It represents the
+	// temperature setting for the GenAI request.
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 0.0
+	GenAIRequestTemperatureKey = attribute.Key("gen_ai.request.temperature")
+
+	// GenAIRequestTopKKey is the attribute Key conforming to the
+	// "gen_ai.request.top_k" semantic conventions. It represents the top_k sampling
+	// setting for the GenAI request.
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 1.0
+	GenAIRequestTopKKey = attribute.Key("gen_ai.request.top_k")
+
+	// GenAIRequestTopPKey is the attribute Key conforming to the
+	// "gen_ai.request.top_p" semantic conventions. It represents the top_p sampling
+	// setting for the GenAI request.
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 1.0
+	GenAIRequestTopPKey = attribute.Key("gen_ai.request.top_p")
+
+	// GenAIResponseFinishReasonsKey is the attribute Key conforming to the
+	// "gen_ai.response.finish_reasons" semantic conventions. It represents the
+	// array of reasons the model stopped generating tokens, corresponding to each
+	// generation received.
+	//
+	// Type: string[]
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: ["stop"], ["stop", "length"]
+	GenAIResponseFinishReasonsKey = attribute.Key("gen_ai.response.finish_reasons")
+
+	// GenAIResponseIDKey is the attribute Key conforming to the
+	// "gen_ai.response.id" semantic conventions. It represents the unique
+	// identifier for the completion.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "chatcmpl-123"
+	GenAIResponseIDKey = attribute.Key("gen_ai.response.id")
+
+	// GenAIResponseModelKey is the attribute Key conforming to the
+	// "gen_ai.response.model" semantic conventions. It represents the name of the
+	// model that generated the response.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "gpt-4-0613"
+	GenAIResponseModelKey = attribute.Key("gen_ai.response.model")
+
+	// GenAISystemKey is the attribute Key conforming to the "gen_ai.system"
+	// semantic conventions. It represents the Generative AI product as identified
+	// by the client or server instrumentation.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "openai"
+	// Note: The `gen_ai.system` describes a family of GenAI models, with the
+	// specific model identified by the `gen_ai.request.model` and
+	// `gen_ai.response.model` attributes.
+	//
+	// The actual GenAI product may differ from the one identified by the client.
+	// Multiple systems, including Azure OpenAI and Gemini, are accessible by
+	// OpenAI client libraries. In such cases, the `gen_ai.system` is set to
+	// `openai` based on the instrumentation's best knowledge, instead of the
+	// actual system. The `server.address` attribute may help identify the actual
+	// system in use for `openai`.
+	//
+	// For a custom model, a custom friendly name SHOULD be used.
+	// If none of these options apply, the `gen_ai.system` SHOULD be set to
+	// `_OTHER`.
+	GenAISystemKey = attribute.Key("gen_ai.system")
+
+	// GenAITokenTypeKey is the attribute Key conforming to the "gen_ai.token.type"
+	// semantic conventions. It represents the type of token being counted.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "input", "output"
+	GenAITokenTypeKey = attribute.Key("gen_ai.token.type")
+
+	// GenAIToolCallIDKey is the attribute Key conforming to the
+	// "gen_ai.tool.call.id" semantic conventions. It represents the tool call
+	// identifier.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "call_mszuSIzqtI65i1wAUOE8w5H4"
+	GenAIToolCallIDKey = attribute.Key("gen_ai.tool.call.id")
+
+	// GenAIToolDescriptionKey is the attribute Key conforming to the
+	// "gen_ai.tool.description" semantic conventions. It represents the tool
+	// description.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Multiply two numbers"
+	GenAIToolDescriptionKey = attribute.Key("gen_ai.tool.description")
+
+	// GenAIToolNameKey is the attribute Key conforming to the "gen_ai.tool.name"
+	// semantic conventions. It represents the name of the tool utilized by the
+	// agent.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Flights"
+	GenAIToolNameKey = attribute.Key("gen_ai.tool.name")
+
+	// GenAIToolTypeKey is the attribute Key conforming to the "gen_ai.tool.type"
+	// semantic conventions. It represents the type of the tool utilized by the
+	// agent.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "function", "extension", "datastore"
+	// Note: Extension: A tool executed on the agent-side to directly call external
+	// APIs, bridging the gap between the agent and real-world systems.
+	// Agent-side operations involve actions that are performed by the agent on the
+	// server or within the agent's controlled environment.
+	// Function: A tool executed on the client-side, where the agent generates
+	// parameters for a predefined function, and the client executes the logic.
+	// Client-side operations are actions taken on the user's end or within the
+	// client application.
+	// Datastore: A tool used by the agent to access and query structured or
+	// unstructured external data for retrieval-augmented tasks or knowledge
+	// updates.
+	GenAIToolTypeKey = attribute.Key("gen_ai.tool.type")
+
+	// GenAIUsageInputTokensKey is the attribute Key conforming to the
+	// "gen_ai.usage.input_tokens" semantic conventions.
It represents the number of + // tokens used in the GenAI input (prompt). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 100 + GenAIUsageInputTokensKey = attribute.Key("gen_ai.usage.input_tokens") + + // GenAIUsageOutputTokensKey is the attribute Key conforming to the + // "gen_ai.usage.output_tokens" semantic conventions. It represents the number + // of tokens used in the GenAI response (completion). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 180 + GenAIUsageOutputTokensKey = attribute.Key("gen_ai.usage.output_tokens") +) + +// GenAIAgentDescription returns an attribute KeyValue conforming to the +// "gen_ai.agent.description" semantic conventions. It represents the free-form +// description of the GenAI agent provided by the application. +func GenAIAgentDescription(val string) attribute.KeyValue { + return GenAIAgentDescriptionKey.String(val) +} + +// GenAIAgentID returns an attribute KeyValue conforming to the "gen_ai.agent.id" +// semantic conventions. It represents the unique identifier of the GenAI agent. +func GenAIAgentID(val string) attribute.KeyValue { + return GenAIAgentIDKey.String(val) +} + +// GenAIAgentName returns an attribute KeyValue conforming to the +// "gen_ai.agent.name" semantic conventions. It represents the human-readable +// name of the GenAI agent provided by the application. +func GenAIAgentName(val string) attribute.KeyValue { + return GenAIAgentNameKey.String(val) +} + +// GenAIConversationID returns an attribute KeyValue conforming to the +// "gen_ai.conversation.id" semantic conventions. It represents the unique +// identifier for a conversation (session, thread), used to store and correlate +// messages within this conversation. +func GenAIConversationID(val string) attribute.KeyValue { + return GenAIConversationIDKey.String(val) +} + +// GenAIDataSourceID returns an attribute KeyValue conforming to the +// "gen_ai.data_source.id" semantic conventions. It represents the data source +// identifier. +func GenAIDataSourceID(val string) attribute.KeyValue { + return GenAIDataSourceIDKey.String(val) +} + +// GenAIOpenAIResponseServiceTier returns an attribute KeyValue conforming to the +// "gen_ai.openai.response.service_tier" semantic conventions. It represents the +// service tier used for the response. +func GenAIOpenAIResponseServiceTier(val string) attribute.KeyValue { + return GenAIOpenAIResponseServiceTierKey.String(val) +} + +// GenAIOpenAIResponseSystemFingerprint returns an attribute KeyValue conforming +// to the "gen_ai.openai.response.system_fingerprint" semantic conventions. It +// represents a fingerprint to track any eventual change in the Generative AI +// environment. +func GenAIOpenAIResponseSystemFingerprint(val string) attribute.KeyValue { + return GenAIOpenAIResponseSystemFingerprintKey.String(val) +} + +// GenAIRequestChoiceCount returns an attribute KeyValue conforming to the +// "gen_ai.request.choice.count" semantic conventions. It represents the target +// number of candidate completions to return. +func GenAIRequestChoiceCount(val int) attribute.KeyValue { + return GenAIRequestChoiceCountKey.Int(val) +} + +// GenAIRequestEncodingFormats returns an attribute KeyValue conforming to the +// "gen_ai.request.encoding_formats" semantic conventions. It represents the +// encoding formats requested in an embeddings operation, if specified. 
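+//
+// For example (an illustrative sketch, not a prescribed pattern), an
+// embeddings instrumentation with an active span could record the requested
+// formats, assuming span is a trace.Span from go.opentelemetry.io/otel/trace:
+//
+//	span.SetAttributes(GenAIRequestEncodingFormats("float", "base64"))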
+func GenAIRequestEncodingFormats(val ...string) attribute.KeyValue {
+	return GenAIRequestEncodingFormatsKey.StringSlice(val)
+}
+
+// GenAIRequestFrequencyPenalty returns an attribute KeyValue conforming to the
+// "gen_ai.request.frequency_penalty" semantic conventions. It represents the
+// frequency penalty setting for the GenAI request.
+func GenAIRequestFrequencyPenalty(val float64) attribute.KeyValue {
+	return GenAIRequestFrequencyPenaltyKey.Float64(val)
+}
+
+// GenAIRequestMaxTokens returns an attribute KeyValue conforming to the
+// "gen_ai.request.max_tokens" semantic conventions. It represents the maximum
+// number of tokens the model generates for a request.
+func GenAIRequestMaxTokens(val int) attribute.KeyValue {
+	return GenAIRequestMaxTokensKey.Int(val)
+}
+
+// GenAIRequestModel returns an attribute KeyValue conforming to the
+// "gen_ai.request.model" semantic conventions. It represents the name of the
+// GenAI model a request is being made to.
+func GenAIRequestModel(val string) attribute.KeyValue {
+	return GenAIRequestModelKey.String(val)
+}
+
+// GenAIRequestPresencePenalty returns an attribute KeyValue conforming to the
+// "gen_ai.request.presence_penalty" semantic conventions. It represents the
+// presence penalty setting for the GenAI request.
+func GenAIRequestPresencePenalty(val float64) attribute.KeyValue {
+	return GenAIRequestPresencePenaltyKey.Float64(val)
+}
+
+// GenAIRequestSeed returns an attribute KeyValue conforming to the
+// "gen_ai.request.seed" semantic conventions. It represents the request seed;
+// requests with the same seed value are more likely to return the same result.
+func GenAIRequestSeed(val int) attribute.KeyValue {
+	return GenAIRequestSeedKey.Int(val)
+}
+
+// GenAIRequestStopSequences returns an attribute KeyValue conforming to the
+// "gen_ai.request.stop_sequences" semantic conventions. It represents the list
+// of sequences that the model will use to stop generating further tokens.
+func GenAIRequestStopSequences(val ...string) attribute.KeyValue {
+	return GenAIRequestStopSequencesKey.StringSlice(val)
+}
+
+// GenAIRequestTemperature returns an attribute KeyValue conforming to the
+// "gen_ai.request.temperature" semantic conventions. It represents the
+// temperature setting for the GenAI request.
+func GenAIRequestTemperature(val float64) attribute.KeyValue {
+	return GenAIRequestTemperatureKey.Float64(val)
+}
+
+// GenAIRequestTopK returns an attribute KeyValue conforming to the
+// "gen_ai.request.top_k" semantic conventions. It represents the top_k sampling
+// setting for the GenAI request.
+func GenAIRequestTopK(val float64) attribute.KeyValue {
+	return GenAIRequestTopKKey.Float64(val)
+}
+
+// GenAIRequestTopP returns an attribute KeyValue conforming to the
+// "gen_ai.request.top_p" semantic conventions. It represents the top_p sampling
+// setting for the GenAI request.
+func GenAIRequestTopP(val float64) attribute.KeyValue {
+	return GenAIRequestTopPKey.Float64(val)
+}
+
+// GenAIResponseFinishReasons returns an attribute KeyValue conforming to the
+// "gen_ai.response.finish_reasons" semantic conventions. It represents the array
+// of reasons the model stopped generating tokens, corresponding to each
+// generation received.
+func GenAIResponseFinishReasons(val ...string) attribute.KeyValue {
+	return GenAIResponseFinishReasonsKey.StringSlice(val)
+}
+
+// GenAIResponseID returns an attribute KeyValue conforming to the
+// "gen_ai.response.id" semantic conventions. It represents the unique identifier
+// for the completion.
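+//
+// For example (an illustrative sketch, assuming span is an active trace.Span),
+// a chat-completion instrumentation could record the response identity
+// together with the responding model, reusing the example values documented
+// above:
+//
+//	span.SetAttributes(
+//		GenAIResponseID("chatcmpl-123"),
+//		GenAIResponseModel("gpt-4-0613"),
+//	)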
+func GenAIResponseID(val string) attribute.KeyValue { + return GenAIResponseIDKey.String(val) +} + +// GenAIResponseModel returns an attribute KeyValue conforming to the +// "gen_ai.response.model" semantic conventions. It represents the name of the +// model that generated the response. +func GenAIResponseModel(val string) attribute.KeyValue { + return GenAIResponseModelKey.String(val) +} + +// GenAIToolCallID returns an attribute KeyValue conforming to the +// "gen_ai.tool.call.id" semantic conventions. It represents the tool call +// identifier. +func GenAIToolCallID(val string) attribute.KeyValue { + return GenAIToolCallIDKey.String(val) +} + +// GenAIToolDescription returns an attribute KeyValue conforming to the +// "gen_ai.tool.description" semantic conventions. It represents the tool +// description. +func GenAIToolDescription(val string) attribute.KeyValue { + return GenAIToolDescriptionKey.String(val) +} + +// GenAIToolName returns an attribute KeyValue conforming to the +// "gen_ai.tool.name" semantic conventions. It represents the name of the tool +// utilized by the agent. +func GenAIToolName(val string) attribute.KeyValue { + return GenAIToolNameKey.String(val) +} + +// GenAIToolType returns an attribute KeyValue conforming to the +// "gen_ai.tool.type" semantic conventions. It represents the type of the tool +// utilized by the agent. +func GenAIToolType(val string) attribute.KeyValue { + return GenAIToolTypeKey.String(val) +} + +// GenAIUsageInputTokens returns an attribute KeyValue conforming to the +// "gen_ai.usage.input_tokens" semantic conventions. It represents the number of +// tokens used in the GenAI input (prompt). +func GenAIUsageInputTokens(val int) attribute.KeyValue { + return GenAIUsageInputTokensKey.Int(val) +} + +// GenAIUsageOutputTokens returns an attribute KeyValue conforming to the +// "gen_ai.usage.output_tokens" semantic conventions. It represents the number of +// tokens used in the GenAI response (completion). +func GenAIUsageOutputTokens(val int) attribute.KeyValue { + return GenAIUsageOutputTokensKey.Int(val) +} + +// Enum values for gen_ai.openai.request.service_tier +var ( + // The system will utilize scale tier credits until they are exhausted. + // Stability: development + GenAIOpenAIRequestServiceTierAuto = GenAIOpenAIRequestServiceTierKey.String("auto") + // The system will utilize the default scale tier. 
+ // Stability: development + GenAIOpenAIRequestServiceTierDefault = GenAIOpenAIRequestServiceTierKey.String("default") +) + +// Enum values for gen_ai.operation.name +var ( + // Chat completion operation such as [OpenAI Chat API] + // Stability: development + // + // [OpenAI Chat API]: https://platform.openai.com/docs/api-reference/chat + GenAIOperationNameChat = GenAIOperationNameKey.String("chat") + // Multimodal content generation operation such as [Gemini Generate Content] + // Stability: development + // + // [Gemini Generate Content]: https://ai.google.dev/api/generate-content + GenAIOperationNameGenerateContent = GenAIOperationNameKey.String("generate_content") + // Text completions operation such as [OpenAI Completions API (Legacy)] + // Stability: development + // + // [OpenAI Completions API (Legacy)]: https://platform.openai.com/docs/api-reference/completions + GenAIOperationNameTextCompletion = GenAIOperationNameKey.String("text_completion") + // Embeddings operation such as [OpenAI Create embeddings API] + // Stability: development + // + // [OpenAI Create embeddings API]: https://platform.openai.com/docs/api-reference/embeddings/create + GenAIOperationNameEmbeddings = GenAIOperationNameKey.String("embeddings") + // Create GenAI agent + // Stability: development + GenAIOperationNameCreateAgent = GenAIOperationNameKey.String("create_agent") + // Invoke GenAI agent + // Stability: development + GenAIOperationNameInvokeAgent = GenAIOperationNameKey.String("invoke_agent") + // Execute a tool + // Stability: development + GenAIOperationNameExecuteTool = GenAIOperationNameKey.String("execute_tool") +) + +// Enum values for gen_ai.output.type +var ( + // Plain text + // Stability: development + GenAIOutputTypeText = GenAIOutputTypeKey.String("text") + // JSON object with known or unknown schema + // Stability: development + GenAIOutputTypeJSON = GenAIOutputTypeKey.String("json") + // Image + // Stability: development + GenAIOutputTypeImage = GenAIOutputTypeKey.String("image") + // Speech + // Stability: development + GenAIOutputTypeSpeech = GenAIOutputTypeKey.String("speech") +) + +// Enum values for gen_ai.system +var ( + // OpenAI + // Stability: development + GenAISystemOpenAI = GenAISystemKey.String("openai") + // Any Google generative AI endpoint + // Stability: development + GenAISystemGCPGenAI = GenAISystemKey.String("gcp.gen_ai") + // Vertex AI + // Stability: development + GenAISystemGCPVertexAI = GenAISystemKey.String("gcp.vertex_ai") + // Gemini + // Stability: development + GenAISystemGCPGemini = GenAISystemKey.String("gcp.gemini") + // Deprecated: Use 'gcp.vertex_ai' instead. + GenAISystemVertexAI = GenAISystemKey.String("vertex_ai") + // Deprecated: Use 'gcp.gemini' instead. 
+	GenAISystemGemini = GenAISystemKey.String("gemini")
+	// Anthropic
+	// Stability: development
+	GenAISystemAnthropic = GenAISystemKey.String("anthropic")
+	// Cohere
+	// Stability: development
+	GenAISystemCohere = GenAISystemKey.String("cohere")
+	// Azure AI Inference
+	// Stability: development
+	GenAISystemAzAIInference = GenAISystemKey.String("az.ai.inference")
+	// Azure OpenAI
+	// Stability: development
+	GenAISystemAzAIOpenAI = GenAISystemKey.String("az.ai.openai")
+	// IBM Watsonx AI
+	// Stability: development
+	GenAISystemIBMWatsonxAI = GenAISystemKey.String("ibm.watsonx.ai")
+	// AWS Bedrock
+	// Stability: development
+	GenAISystemAWSBedrock = GenAISystemKey.String("aws.bedrock")
+	// Perplexity
+	// Stability: development
+	GenAISystemPerplexity = GenAISystemKey.String("perplexity")
+	// xAI
+	// Stability: development
+	GenAISystemXai = GenAISystemKey.String("xai")
+	// DeepSeek
+	// Stability: development
+	GenAISystemDeepseek = GenAISystemKey.String("deepseek")
+	// Groq
+	// Stability: development
+	GenAISystemGroq = GenAISystemKey.String("groq")
+	// Mistral AI
+	// Stability: development
+	GenAISystemMistralAI = GenAISystemKey.String("mistral_ai")
+)
+
+// Enum values for gen_ai.token.type
+var (
+	// Input tokens (prompt, input, etc.)
+	// Stability: development
+	GenAITokenTypeInput = GenAITokenTypeKey.String("input")
+	// Deprecated: Replaced by `output`.
+	GenAITokenTypeCompletion = GenAITokenTypeKey.String("output")
+	// Output tokens (completion, response, etc.)
+	// Stability: development
+	GenAITokenTypeOutput = GenAITokenTypeKey.String("output")
+)
+
+// Namespace: geo
+const (
+	// GeoContinentCodeKey is the attribute Key conforming to the
+	// "geo.continent.code" semantic conventions. It represents the two-letter code
+	// representing the continent's name.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	GeoContinentCodeKey = attribute.Key("geo.continent.code")
+
+	// GeoCountryISOCodeKey is the attribute Key conforming to the
+	// "geo.country.iso_code" semantic conventions. It represents the two-letter ISO
+	// Country Code ([ISO 3166-1 alpha2]).
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "CA"
+	//
+	// [ISO 3166-1 alpha2]: https://wikipedia.org/wiki/ISO_3166-1#Codes
+	GeoCountryISOCodeKey = attribute.Key("geo.country.iso_code")
+
+	// GeoLocalityNameKey is the attribute Key conforming to the "geo.locality.name"
+	// semantic conventions. It represents the locality name. Represents the name of
+	// a city, town, village, or similar populated place.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Montreal", "Berlin"
+	GeoLocalityNameKey = attribute.Key("geo.locality.name")
+
+	// GeoLocationLatKey is the attribute Key conforming to the "geo.location.lat"
+	// semantic conventions. It represents the latitude of the geo location in
+	// [WGS84].
+	//
+	// Type: double
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: 45.505918
+	//
+	// [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84
+	GeoLocationLatKey = attribute.Key("geo.location.lat")
+
+	// GeoLocationLonKey is the attribute Key conforming to the "geo.location.lon"
+	// semantic conventions. It represents the longitude of the geo location in
+	// [WGS84].
+ // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: -73.61483 + // + // [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 + GeoLocationLonKey = attribute.Key("geo.location.lon") + + // GeoPostalCodeKey is the attribute Key conforming to the "geo.postal_code" + // semantic conventions. It represents the postal code associated with the + // location. Values appropriate for this field may also be known as a postcode + // or ZIP code and will vary widely from country to country. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "94040" + GeoPostalCodeKey = attribute.Key("geo.postal_code") + + // GeoRegionISOCodeKey is the attribute Key conforming to the + // "geo.region.iso_code" semantic conventions. It represents the region ISO code + // ([ISO 3166-2]). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CA-QC" + // + // [ISO 3166-2]: https://wikipedia.org/wiki/ISO_3166-2 + GeoRegionISOCodeKey = attribute.Key("geo.region.iso_code") +) + +// GeoCountryISOCode returns an attribute KeyValue conforming to the +// "geo.country.iso_code" semantic conventions. It represents the two-letter ISO +// Country Code ([ISO 3166-1 alpha2]). +// +// [ISO 3166-1 alpha2]: https://wikipedia.org/wiki/ISO_3166-1#Codes +func GeoCountryISOCode(val string) attribute.KeyValue { + return GeoCountryISOCodeKey.String(val) +} + +// GeoLocalityName returns an attribute KeyValue conforming to the +// "geo.locality.name" semantic conventions. It represents the locality name. +// Represents the name of a city, town, village, or similar populated place. +func GeoLocalityName(val string) attribute.KeyValue { + return GeoLocalityNameKey.String(val) +} + +// GeoLocationLat returns an attribute KeyValue conforming to the +// "geo.location.lat" semantic conventions. It represents the latitude of the geo +// location in [WGS84]. +// +// [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 +func GeoLocationLat(val float64) attribute.KeyValue { + return GeoLocationLatKey.Float64(val) +} + +// GeoLocationLon returns an attribute KeyValue conforming to the +// "geo.location.lon" semantic conventions. It represents the longitude of the +// geo location in [WGS84]. +// +// [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 +func GeoLocationLon(val float64) attribute.KeyValue { + return GeoLocationLonKey.Float64(val) +} + +// GeoPostalCode returns an attribute KeyValue conforming to the +// "geo.postal_code" semantic conventions. It represents the postal code +// associated with the location. Values appropriate for this field may also be +// known as a postcode or ZIP code and will vary widely from country to country. +func GeoPostalCode(val string) attribute.KeyValue { + return GeoPostalCodeKey.String(val) +} + +// GeoRegionISOCode returns an attribute KeyValue conforming to the +// "geo.region.iso_code" semantic conventions. It represents the region ISO code +// ([ISO 3166-2]). 
+// +// [ISO 3166-2]: https://wikipedia.org/wiki/ISO_3166-2 +func GeoRegionISOCode(val string) attribute.KeyValue { + return GeoRegionISOCodeKey.String(val) +} + +// Enum values for geo.continent.code +var ( + // Africa + // Stability: development + GeoContinentCodeAf = GeoContinentCodeKey.String("AF") + // Antarctica + // Stability: development + GeoContinentCodeAn = GeoContinentCodeKey.String("AN") + // Asia + // Stability: development + GeoContinentCodeAs = GeoContinentCodeKey.String("AS") + // Europe + // Stability: development + GeoContinentCodeEu = GeoContinentCodeKey.String("EU") + // North America + // Stability: development + GeoContinentCodeNa = GeoContinentCodeKey.String("NA") + // Oceania + // Stability: development + GeoContinentCodeOc = GeoContinentCodeKey.String("OC") + // South America + // Stability: development + GeoContinentCodeSa = GeoContinentCodeKey.String("SA") +) + +// Namespace: go +const ( + // GoMemoryTypeKey is the attribute Key conforming to the "go.memory.type" + // semantic conventions. It represents the type of memory. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "other", "stack" + GoMemoryTypeKey = attribute.Key("go.memory.type") +) + +// Enum values for go.memory.type +var ( + // Memory allocated from the heap that is reserved for stack space, whether or + // not it is currently in-use. + // Stability: development + GoMemoryTypeStack = GoMemoryTypeKey.String("stack") + // Memory used by the Go runtime, excluding other categories of memory usage + // described in this enumeration. + // Stability: development + GoMemoryTypeOther = GoMemoryTypeKey.String("other") +) + +// Namespace: graphql +const ( + // GraphQLDocumentKey is the attribute Key conforming to the "graphql.document" + // semantic conventions. It represents the GraphQL document being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: query findBookById { bookById(id: ?) { name } } + // Note: The value may be sanitized to exclude sensitive information. + GraphQLDocumentKey = attribute.Key("graphql.document") + + // GraphQLOperationNameKey is the attribute Key conforming to the + // "graphql.operation.name" semantic conventions. It represents the name of the + // operation being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: findBookById + GraphQLOperationNameKey = attribute.Key("graphql.operation.name") + + // GraphQLOperationTypeKey is the attribute Key conforming to the + // "graphql.operation.type" semantic conventions. It represents the type of the + // operation being executed. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "query", "mutation", "subscription" + GraphQLOperationTypeKey = attribute.Key("graphql.operation.type") +) + +// GraphQLDocument returns an attribute KeyValue conforming to the +// "graphql.document" semantic conventions. It represents the GraphQL document +// being executed. +func GraphQLDocument(val string) attribute.KeyValue { + return GraphQLDocumentKey.String(val) +} + +// GraphQLOperationName returns an attribute KeyValue conforming to the +// "graphql.operation.name" semantic conventions. It represents the name of the +// operation being executed. 
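+//
+// For example (an illustrative sketch, assuming span is an active trace.Span),
+// a GraphQL server instrumentation could combine this with the operation-type
+// enum values defined below:
+//
+//	span.SetAttributes(
+//		GraphQLOperationName("findBookById"),
+//		GraphQLOperationTypeQuery,
+//	)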
+func GraphQLOperationName(val string) attribute.KeyValue { + return GraphQLOperationNameKey.String(val) +} + +// Enum values for graphql.operation.type +var ( + // GraphQL query + // Stability: development + GraphQLOperationTypeQuery = GraphQLOperationTypeKey.String("query") + // GraphQL mutation + // Stability: development + GraphQLOperationTypeMutation = GraphQLOperationTypeKey.String("mutation") + // GraphQL subscription + // Stability: development + GraphQLOperationTypeSubscription = GraphQLOperationTypeKey.String("subscription") +) + +// Namespace: heroku +const ( + // HerokuAppIDKey is the attribute Key conforming to the "heroku.app.id" + // semantic conventions. It represents the unique identifier for the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2daa2797-e42b-4624-9322-ec3f968df4da" + HerokuAppIDKey = attribute.Key("heroku.app.id") + + // HerokuReleaseCommitKey is the attribute Key conforming to the + // "heroku.release.commit" semantic conventions. It represents the commit hash + // for the current release. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "e6134959463efd8966b20e75b913cafe3f5ec" + HerokuReleaseCommitKey = attribute.Key("heroku.release.commit") + + // HerokuReleaseCreationTimestampKey is the attribute Key conforming to the + // "heroku.release.creation_timestamp" semantic conventions. It represents the + // time and date the release was created. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2022-10-23T18:00:42Z" + HerokuReleaseCreationTimestampKey = attribute.Key("heroku.release.creation_timestamp") +) + +// HerokuAppID returns an attribute KeyValue conforming to the "heroku.app.id" +// semantic conventions. It represents the unique identifier for the application. +func HerokuAppID(val string) attribute.KeyValue { + return HerokuAppIDKey.String(val) +} + +// HerokuReleaseCommit returns an attribute KeyValue conforming to the +// "heroku.release.commit" semantic conventions. It represents the commit hash +// for the current release. +func HerokuReleaseCommit(val string) attribute.KeyValue { + return HerokuReleaseCommitKey.String(val) +} + +// HerokuReleaseCreationTimestamp returns an attribute KeyValue conforming to the +// "heroku.release.creation_timestamp" semantic conventions. It represents the +// time and date the release was created. +func HerokuReleaseCreationTimestamp(val string) attribute.KeyValue { + return HerokuReleaseCreationTimestampKey.String(val) +} + +// Namespace: host +const ( + // HostArchKey is the attribute Key conforming to the "host.arch" semantic + // conventions. It represents the CPU architecture the host system is running + // on. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + HostArchKey = attribute.Key("host.arch") + + // HostCPUCacheL2SizeKey is the attribute Key conforming to the + // "host.cpu.cache.l2.size" semantic conventions. It represents the amount of + // level 2 memory cache available to the processor (in Bytes). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12288000 + HostCPUCacheL2SizeKey = attribute.Key("host.cpu.cache.l2.size") + + // HostCPUFamilyKey is the attribute Key conforming to the "host.cpu.family" + // semantic conventions. It represents the family or generation of the CPU. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "6", "PA-RISC 1.1e" + HostCPUFamilyKey = attribute.Key("host.cpu.family") + + // HostCPUModelIDKey is the attribute Key conforming to the "host.cpu.model.id" + // semantic conventions. It represents the model identifier. It provides more + // granular information about the CPU, distinguishing it from other CPUs within + // the same family. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "6", "9000/778/B180L" + HostCPUModelIDKey = attribute.Key("host.cpu.model.id") + + // HostCPUModelNameKey is the attribute Key conforming to the + // "host.cpu.model.name" semantic conventions. It represents the model + // designation of the processor. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz" + HostCPUModelNameKey = attribute.Key("host.cpu.model.name") + + // HostCPUSteppingKey is the attribute Key conforming to the "host.cpu.stepping" + // semantic conventions. It represents the stepping or core revisions. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1", "r1p1" + HostCPUSteppingKey = attribute.Key("host.cpu.stepping") + + // HostCPUVendorIDKey is the attribute Key conforming to the + // "host.cpu.vendor.id" semantic conventions. It represents the processor + // manufacturer identifier. A maximum 12-character string. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "GenuineIntel" + // Note: [CPUID] command returns the vendor ID string in EBX, EDX and ECX + // registers. Writing these to memory in this order results in a 12-character + // string. + // + // [CPUID]: https://wiki.osdev.org/CPUID + HostCPUVendorIDKey = attribute.Key("host.cpu.vendor.id") + + // HostIDKey is the attribute Key conforming to the "host.id" semantic + // conventions. It represents the unique host ID. For Cloud, this must be the + // instance_id assigned by the cloud provider. For non-containerized systems, + // this should be the `machine-id`. See the table below for the sources to use + // to determine the `machine-id` based on operating system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "fdbf79e8af94cb7f9e8df36789187052" + HostIDKey = attribute.Key("host.id") + + // HostImageIDKey is the attribute Key conforming to the "host.image.id" + // semantic conventions. It represents the VM image ID or host OS image ID. For + // Cloud, this value is from the provider. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ami-07b06b442921831e5" + HostImageIDKey = attribute.Key("host.image.id") + + // HostImageNameKey is the attribute Key conforming to the "host.image.name" + // semantic conventions. It represents the name of the VM image or OS install + // the host was instantiated from. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "infra-ami-eks-worker-node-7d4ec78312", "CentOS-8-x86_64-1905" + HostImageNameKey = attribute.Key("host.image.name") + + // HostImageVersionKey is the attribute Key conforming to the + // "host.image.version" semantic conventions. It represents the version string + // of the VM image or host OS as defined in [Version Attributes]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0.1" + // + // [Version Attributes]: /docs/resource/README.md#version-attributes + HostImageVersionKey = attribute.Key("host.image.version") + + // HostIPKey is the attribute Key conforming to the "host.ip" semantic + // conventions. It represents the available IP addresses of the host, excluding + // loopback interfaces. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "192.168.1.140", "fe80::abc2:4a28:737a:609e" + // Note: IPv4 Addresses MUST be specified in dotted-quad notation. IPv6 + // addresses MUST be specified in the [RFC 5952] format. + // + // [RFC 5952]: https://www.rfc-editor.org/rfc/rfc5952.html + HostIPKey = attribute.Key("host.ip") + + // HostMacKey is the attribute Key conforming to the "host.mac" semantic + // conventions. It represents the available MAC addresses of the host, excluding + // loopback interfaces. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "AC-DE-48-23-45-67", "AC-DE-48-23-45-67-01-9F" + // Note: MAC Addresses MUST be represented in [IEEE RA hexadecimal form]: as + // hyphen-separated octets in uppercase hexadecimal form from most to least + // significant. + // + // [IEEE RA hexadecimal form]: https://standards.ieee.org/wp-content/uploads/import/documents/tutorials/eui.pdf + HostMacKey = attribute.Key("host.mac") + + // HostNameKey is the attribute Key conforming to the "host.name" semantic + // conventions. It represents the name of the host. On Unix systems, it may + // contain what the hostname command returns, or the fully qualified hostname, + // or another name specified by the user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-test" + HostNameKey = attribute.Key("host.name") + + // HostTypeKey is the attribute Key conforming to the "host.type" semantic + // conventions. It represents the type of host. For Cloud, this must be the + // machine type. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "n1-standard-1" + HostTypeKey = attribute.Key("host.type") +) + +// HostCPUCacheL2Size returns an attribute KeyValue conforming to the +// "host.cpu.cache.l2.size" semantic conventions. It represents the amount of +// level 2 memory cache available to the processor (in Bytes). +func HostCPUCacheL2Size(val int) attribute.KeyValue { + return HostCPUCacheL2SizeKey.Int(val) +} + +// HostCPUFamily returns an attribute KeyValue conforming to the +// "host.cpu.family" semantic conventions. It represents the family or generation +// of the CPU. +func HostCPUFamily(val string) attribute.KeyValue { + return HostCPUFamilyKey.String(val) +} + +// HostCPUModelID returns an attribute KeyValue conforming to the +// "host.cpu.model.id" semantic conventions. It represents the model identifier. +// It provides more granular information about the CPU, distinguishing it from +// other CPUs within the same family. +func HostCPUModelID(val string) attribute.KeyValue { + return HostCPUModelIDKey.String(val) +} + +// HostCPUModelName returns an attribute KeyValue conforming to the +// "host.cpu.model.name" semantic conventions. It represents the model +// designation of the processor. 
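+//
+// For example (an illustrative sketch, not a prescribed pattern), host CPU
+// details could be grouped into a resource via the SDK's resource package
+// (assumed import: go.opentelemetry.io/otel/sdk/resource), reusing the example
+// values documented above:
+//
+//	res := resource.NewSchemaless(
+//		HostCPUFamily("6"),
+//		HostCPUModelName("11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz"),
+//	)
+//	_ = res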
+func HostCPUModelName(val string) attribute.KeyValue { + return HostCPUModelNameKey.String(val) +} + +// HostCPUStepping returns an attribute KeyValue conforming to the +// "host.cpu.stepping" semantic conventions. It represents the stepping or core +// revisions. +func HostCPUStepping(val string) attribute.KeyValue { + return HostCPUSteppingKey.String(val) +} + +// HostCPUVendorID returns an attribute KeyValue conforming to the +// "host.cpu.vendor.id" semantic conventions. It represents the processor +// manufacturer identifier. A maximum 12-character string. +func HostCPUVendorID(val string) attribute.KeyValue { + return HostCPUVendorIDKey.String(val) +} + +// HostID returns an attribute KeyValue conforming to the "host.id" semantic +// conventions. It represents the unique host ID. For Cloud, this must be the +// instance_id assigned by the cloud provider. For non-containerized systems, +// this should be the `machine-id`. See the table below for the sources to use to +// determine the `machine-id` based on operating system. +func HostID(val string) attribute.KeyValue { + return HostIDKey.String(val) +} + +// HostImageID returns an attribute KeyValue conforming to the "host.image.id" +// semantic conventions. It represents the VM image ID or host OS image ID. For +// Cloud, this value is from the provider. +func HostImageID(val string) attribute.KeyValue { + return HostImageIDKey.String(val) +} + +// HostImageName returns an attribute KeyValue conforming to the +// "host.image.name" semantic conventions. It represents the name of the VM image +// or OS install the host was instantiated from. +func HostImageName(val string) attribute.KeyValue { + return HostImageNameKey.String(val) +} + +// HostImageVersion returns an attribute KeyValue conforming to the +// "host.image.version" semantic conventions. It represents the version string of +// the VM image or host OS as defined in [Version Attributes]. +// +// [Version Attributes]: /docs/resource/README.md#version-attributes +func HostImageVersion(val string) attribute.KeyValue { + return HostImageVersionKey.String(val) +} + +// HostIP returns an attribute KeyValue conforming to the "host.ip" semantic +// conventions. It represents the available IP addresses of the host, excluding +// loopback interfaces. +func HostIP(val ...string) attribute.KeyValue { + return HostIPKey.StringSlice(val) +} + +// HostMac returns an attribute KeyValue conforming to the "host.mac" semantic +// conventions. It represents the available MAC addresses of the host, excluding +// loopback interfaces. +func HostMac(val ...string) attribute.KeyValue { + return HostMacKey.StringSlice(val) +} + +// HostName returns an attribute KeyValue conforming to the "host.name" semantic +// conventions. It represents the name of the host. On Unix systems, it may +// contain what the hostname command returns, or the fully qualified hostname, or +// another name specified by the user. +func HostName(val string) attribute.KeyValue { + return HostNameKey.String(val) +} + +// HostType returns an attribute KeyValue conforming to the "host.type" semantic +// conventions. It represents the type of host. For Cloud, this must be the +// machine type. 
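+//
+// For example (an illustrative sketch, again assuming the SDK's resource
+// package is imported as resource), a cloud host resource could combine the
+// host ID and machine type using the example values documented above:
+//
+//	res := resource.NewSchemaless(
+//		HostID("fdbf79e8af94cb7f9e8df36789187052"),
+//		HostType("n1-standard-1"),
+//	)
+//	_ = res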
+func HostType(val string) attribute.KeyValue {
+	return HostTypeKey.String(val)
+}
+
+// Enum values for host.arch
+var (
+	// AMD64
+	// Stability: development
+	HostArchAMD64 = HostArchKey.String("amd64")
+	// ARM32
+	// Stability: development
+	HostArchARM32 = HostArchKey.String("arm32")
+	// ARM64
+	// Stability: development
+	HostArchARM64 = HostArchKey.String("arm64")
+	// Itanium
+	// Stability: development
+	HostArchIA64 = HostArchKey.String("ia64")
+	// 32-bit PowerPC
+	// Stability: development
+	HostArchPPC32 = HostArchKey.String("ppc32")
+	// 64-bit PowerPC
+	// Stability: development
+	HostArchPPC64 = HostArchKey.String("ppc64")
+	// IBM z/Architecture
+	// Stability: development
+	HostArchS390x = HostArchKey.String("s390x")
+	// 32-bit x86
+	// Stability: development
+	HostArchX86 = HostArchKey.String("x86")
+)
+
+// Namespace: http
+const (
+	// HTTPConnectionStateKey is the attribute Key conforming to the
+	// "http.connection.state" semantic conventions. It represents the state of the
+	// HTTP connection in the HTTP connection pool.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "active", "idle"
+	HTTPConnectionStateKey = attribute.Key("http.connection.state")
+
+	// HTTPRequestBodySizeKey is the attribute Key conforming to the
+	// "http.request.body.size" semantic conventions. It represents the size of the
+	// request payload body in bytes. This is the number of bytes transferred
+	// excluding headers and is often, but not always, present as the
+	// [Content-Length] header. For requests using transport encoding, this should
+	// be the compressed size.
+	//
+	// Type: int
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length
+	HTTPRequestBodySizeKey = attribute.Key("http.request.body.size")
+
+	// HTTPRequestMethodKey is the attribute Key conforming to the
+	// "http.request.method" semantic conventions. It represents the HTTP request
+	// method.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: "GET", "POST", "HEAD"
+	// Note: HTTP request method value SHOULD be "known" to the instrumentation.
+	// By default, this convention defines "known" methods as the ones listed in
+	// [RFC9110] and the PATCH method defined in [RFC5789].
+	//
+	// If the HTTP request method is not known to instrumentation, it MUST set the
+	// `http.request.method` attribute to `_OTHER`.
+	//
+	// If the HTTP instrumentation could end up converting valid HTTP request
+	// methods to `_OTHER`, then it MUST provide a way to override
+	// the list of known HTTP methods. If this override is done via environment
+	// variable, then the environment variable MUST be named
+	// OTEL_INSTRUMENTATION_HTTP_KNOWN_METHODS and support a comma-separated list of
+	// case-sensitive known HTTP methods
+	// (this list MUST be a full override of the default known methods, it is not a
+	// list of known methods in addition to the defaults).
+	//
+	// HTTP method names are case-sensitive and the `http.request.method` attribute
+	// value MUST match a known HTTP method name exactly.
+	// Instrumentations for specific web frameworks that consider HTTP methods to be
+	// case insensitive SHOULD populate a canonical equivalent.
+	// Tracing instrumentations that do so MUST also set
+	// `http.request.method_original` to the original value.
+ //
+ // [RFC9110]: https://www.rfc-editor.org/rfc/rfc9110.html#name-methods
+ // [RFC5789]: https://www.rfc-editor.org/rfc/rfc5789.html
+ HTTPRequestMethodKey = attribute.Key("http.request.method")
+
+ // HTTPRequestMethodOriginalKey is the attribute Key conforming to the
+ // "http.request.method_original" semantic conventions. It represents the
+ // original HTTP method sent by the client in the request line.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples: "GeT", "ACL", "foo"
+ HTTPRequestMethodOriginalKey = attribute.Key("http.request.method_original")
+
+ // HTTPRequestResendCountKey is the attribute Key conforming to the
+ // "http.request.resend_count" semantic conventions. It represents the ordinal
+ // number of request resending attempt (for any reason, including redirects).
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Note: The resend count SHOULD be updated each time an HTTP request gets
+ // resent by the client, regardless of what was the cause of the resending (e.g.
+ // redirection, authorization failure, 503 Service Unavailable, network issues,
+ // or any other).
+ HTTPRequestResendCountKey = attribute.Key("http.request.resend_count")
+
+ // HTTPRequestSizeKey is the attribute Key conforming to the "http.request.size"
+ // semantic conventions. It represents the total size of the request in bytes.
+ // This should be the total number of bytes sent over the wire, including the
+ // request line (HTTP/1.1), framing (HTTP/2 and HTTP/3), headers, and request
+ // body if any.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ HTTPRequestSizeKey = attribute.Key("http.request.size")
+
+ // HTTPResponseBodySizeKey is the attribute Key conforming to the
+ // "http.response.body.size" semantic conventions. It represents the size of the
+ // response payload body in bytes. This is the number of bytes transferred
+ // excluding headers and is often, but not always, present as the
+ // [Content-Length] header. For requests using transport encoding, this should
+ // be the compressed size.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length
+ HTTPResponseBodySizeKey = attribute.Key("http.response.body.size")
+
+ // HTTPResponseSizeKey is the attribute Key conforming to the
+ // "http.response.size" semantic conventions. It represents the total size of
+ // the response in bytes. This should be the total number of bytes sent over the
+ // wire, including the status line (HTTP/1.1), framing (HTTP/2 and HTTP/3),
+ // headers, and response body and trailers if any.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ HTTPResponseSizeKey = attribute.Key("http.response.size")
+
+ // HTTPResponseStatusCodeKey is the attribute Key conforming to the
+ // "http.response.status_code" semantic conventions. It represents the
+ // [HTTP response status code].
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples: 200
+ //
+ // [HTTP response status code]: https://tools.ietf.org/html/rfc7231#section-6
+ HTTPResponseStatusCodeKey = attribute.Key("http.response.status_code")
+
+ // HTTPRouteKey is the attribute Key conforming to the "http.route" semantic
+ // conventions.
It represents the matched route, that is, the path template in + // the format used by the respective server framework. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "/users/:userID?", "{controller}/{action}/{id?}" + // Note: MUST NOT be populated when this is not supported by the HTTP server + // framework as the route attribute should have low-cardinality and the URI path + // can NOT substitute it. + // SHOULD include the [application root] if there is one. + // + // [application root]: /docs/http/http-spans.md#http-server-definitions + HTTPRouteKey = attribute.Key("http.route") +) + +// HTTPRequestBodySize returns an attribute KeyValue conforming to the +// "http.request.body.size" semantic conventions. It represents the size of the +// request payload body in bytes. This is the number of bytes transferred +// excluding headers and is often, but not always, present as the +// [Content-Length] header. For requests using transport encoding, this should be +// the compressed size. +// +// [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length +func HTTPRequestBodySize(val int) attribute.KeyValue { + return HTTPRequestBodySizeKey.Int(val) +} + +// HTTPRequestMethodOriginal returns an attribute KeyValue conforming to the +// "http.request.method_original" semantic conventions. It represents the +// original HTTP method sent by the client in the request line. +func HTTPRequestMethodOriginal(val string) attribute.KeyValue { + return HTTPRequestMethodOriginalKey.String(val) +} + +// HTTPRequestResendCount returns an attribute KeyValue conforming to the +// "http.request.resend_count" semantic conventions. It represents the ordinal +// number of request resending attempt (for any reason, including redirects). +func HTTPRequestResendCount(val int) attribute.KeyValue { + return HTTPRequestResendCountKey.Int(val) +} + +// HTTPRequestSize returns an attribute KeyValue conforming to the +// "http.request.size" semantic conventions. It represents the total size of the +// request in bytes. This should be the total number of bytes sent over the wire, +// including the request line (HTTP/1.1), framing (HTTP/2 and HTTP/3), headers, +// and request body if any. +func HTTPRequestSize(val int) attribute.KeyValue { + return HTTPRequestSizeKey.Int(val) +} + +// HTTPResponseBodySize returns an attribute KeyValue conforming to the +// "http.response.body.size" semantic conventions. It represents the size of the +// response payload body in bytes. This is the number of bytes transferred +// excluding headers and is often, but not always, present as the +// [Content-Length] header. For requests using transport encoding, this should be +// the compressed size. +// +// [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length +func HTTPResponseBodySize(val int) attribute.KeyValue { + return HTTPResponseBodySizeKey.Int(val) +} + +// HTTPResponseSize returns an attribute KeyValue conforming to the +// "http.response.size" semantic conventions. It represents the total size of the +// response in bytes. This should be the total number of bytes sent over the +// wire, including the status line (HTTP/1.1), framing (HTTP/2 and HTTP/3), +// headers, and response body and trailers if any. +func HTTPResponseSize(val int) attribute.KeyValue { + return HTTPResponseSizeKey.Int(val) +} + +// HTTPResponseStatusCode returns an attribute KeyValue conforming to the +// "http.response.status_code" semantic conventions. 
It represents the +// [HTTP response status code]. +// +// [HTTP response status code]: https://tools.ietf.org/html/rfc7231#section-6 +func HTTPResponseStatusCode(val int) attribute.KeyValue { + return HTTPResponseStatusCodeKey.Int(val) +} + +// HTTPRoute returns an attribute KeyValue conforming to the "http.route" +// semantic conventions. It represents the matched route, that is, the path +// template in the format used by the respective server framework. +func HTTPRoute(val string) attribute.KeyValue { + return HTTPRouteKey.String(val) +} + +// Enum values for http.connection.state +var ( + // active state. + // Stability: development + HTTPConnectionStateActive = HTTPConnectionStateKey.String("active") + // idle state. + // Stability: development + HTTPConnectionStateIdle = HTTPConnectionStateKey.String("idle") +) + +// Enum values for http.request.method +var ( + // CONNECT method. + // Stability: stable + HTTPRequestMethodConnect = HTTPRequestMethodKey.String("CONNECT") + // DELETE method. + // Stability: stable + HTTPRequestMethodDelete = HTTPRequestMethodKey.String("DELETE") + // GET method. + // Stability: stable + HTTPRequestMethodGet = HTTPRequestMethodKey.String("GET") + // HEAD method. + // Stability: stable + HTTPRequestMethodHead = HTTPRequestMethodKey.String("HEAD") + // OPTIONS method. + // Stability: stable + HTTPRequestMethodOptions = HTTPRequestMethodKey.String("OPTIONS") + // PATCH method. + // Stability: stable + HTTPRequestMethodPatch = HTTPRequestMethodKey.String("PATCH") + // POST method. + // Stability: stable + HTTPRequestMethodPost = HTTPRequestMethodKey.String("POST") + // PUT method. + // Stability: stable + HTTPRequestMethodPut = HTTPRequestMethodKey.String("PUT") + // TRACE method. + // Stability: stable + HTTPRequestMethodTrace = HTTPRequestMethodKey.String("TRACE") + // Any HTTP method that the instrumentation has no prior knowledge of. + // Stability: stable + HTTPRequestMethodOther = HTTPRequestMethodKey.String("_OTHER") +) + +// Namespace: hw +const ( + // HwIDKey is the attribute Key conforming to the "hw.id" semantic conventions. + // It represents an identifier for the hardware component, unique within the + // monitored host. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "win32battery_battery_testsysa33_1" + HwIDKey = attribute.Key("hw.id") + + // HwNameKey is the attribute Key conforming to the "hw.name" semantic + // conventions. It represents an easily-recognizable name for the hardware + // component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "eth0" + HwNameKey = attribute.Key("hw.name") + + // HwParentKey is the attribute Key conforming to the "hw.parent" semantic + // conventions. It represents the unique identifier of the parent component + // (typically the `hw.id` attribute of the enclosure, or disk controller). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "dellStorage_perc_0" + HwParentKey = attribute.Key("hw.parent") + + // HwStateKey is the attribute Key conforming to the "hw.state" semantic + // conventions. It represents the current state of the component. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + HwStateKey = attribute.Key("hw.state") + + // HwTypeKey is the attribute Key conforming to the "hw.type" semantic + // conventions. It represents the type of the component. 
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: Describes the category of the hardware component for which `hw.state`
+ // is being reported. For example, `hw.type=temperature` along with
+ // `hw.state=degraded` would indicate that the temperature of the hardware
+ // component has been reported as `degraded`.
+ HwTypeKey = attribute.Key("hw.type")
+)
+
+// HwID returns an attribute KeyValue conforming to the "hw.id" semantic
+// conventions. It represents an identifier for the hardware component, unique
+// within the monitored host.
+func HwID(val string) attribute.KeyValue {
+ return HwIDKey.String(val)
+}
+
+// HwName returns an attribute KeyValue conforming to the "hw.name" semantic
+// conventions. It represents an easily-recognizable name for the hardware
+// component.
+func HwName(val string) attribute.KeyValue {
+ return HwNameKey.String(val)
+}
+
+// HwParent returns an attribute KeyValue conforming to the "hw.parent" semantic
+// conventions. It represents the unique identifier of the parent component
+// (typically the `hw.id` attribute of the enclosure, or disk controller).
+func HwParent(val string) attribute.KeyValue {
+ return HwParentKey.String(val)
+}
+
+// Enum values for hw.state
+var (
+ // Ok
+ // Stability: development
+ HwStateOk = HwStateKey.String("ok")
+ // Degraded
+ // Stability: development
+ HwStateDegraded = HwStateKey.String("degraded")
+ // Failed
+ // Stability: development
+ HwStateFailed = HwStateKey.String("failed")
+)
+
+// Enum values for hw.type
+var (
+ // Battery
+ // Stability: development
+ HwTypeBattery = HwTypeKey.String("battery")
+ // CPU
+ // Stability: development
+ HwTypeCPU = HwTypeKey.String("cpu")
+ // Disk controller
+ // Stability: development
+ HwTypeDiskController = HwTypeKey.String("disk_controller")
+ // Enclosure
+ // Stability: development
+ HwTypeEnclosure = HwTypeKey.String("enclosure")
+ // Fan
+ // Stability: development
+ HwTypeFan = HwTypeKey.String("fan")
+ // GPU
+ // Stability: development
+ HwTypeGpu = HwTypeKey.String("gpu")
+ // Logical disk
+ // Stability: development
+ HwTypeLogicalDisk = HwTypeKey.String("logical_disk")
+ // Memory
+ // Stability: development
+ HwTypeMemory = HwTypeKey.String("memory")
+ // Network
+ // Stability: development
+ HwTypeNetwork = HwTypeKey.String("network")
+ // Physical disk
+ // Stability: development
+ HwTypePhysicalDisk = HwTypeKey.String("physical_disk")
+ // Power supply
+ // Stability: development
+ HwTypePowerSupply = HwTypeKey.String("power_supply")
+ // Tape drive
+ // Stability: development
+ HwTypeTapeDrive = HwTypeKey.String("tape_drive")
+ // Temperature
+ // Stability: development
+ HwTypeTemperature = HwTypeKey.String("temperature")
+ // Voltage
+ // Stability: development
+ HwTypeVoltage = HwTypeKey.String("voltage")
+)
+
+// Namespace: ios
+const (
+ // IOSAppStateKey is the attribute Key conforming to the "ios.app.state"
+ // semantic conventions. It represents the state of the application.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: The iOS lifecycle states are defined in the
+ // [UIApplicationDelegate documentation], from which the `OS terminology`
+ // column values are derived.
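+ //
+ // As an illustration (editor's sketch; span is an assumption, not upstream
+ // text), an iOS instrumentation might record the transition on a UIKit
+ // callback:
+ //
+ //	// applicationDidEnterBackground fired:
+ //	span.SetAttributes(semconv.IOSAppStateBackground)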
+ // + // [UIApplicationDelegate documentation]: https://developer.apple.com/documentation/uikit/uiapplicationdelegate + IOSAppStateKey = attribute.Key("ios.app.state") +) + +// Enum values for ios.app.state +var ( + // The app has become `active`. Associated with UIKit notification + // `applicationDidBecomeActive`. + // + // Stability: development + IOSAppStateActive = IOSAppStateKey.String("active") + // The app is now `inactive`. Associated with UIKit notification + // `applicationWillResignActive`. + // + // Stability: development + IOSAppStateInactive = IOSAppStateKey.String("inactive") + // The app is now in the background. This value is associated with UIKit + // notification `applicationDidEnterBackground`. + // + // Stability: development + IOSAppStateBackground = IOSAppStateKey.String("background") + // The app is now in the foreground. This value is associated with UIKit + // notification `applicationWillEnterForeground`. + // + // Stability: development + IOSAppStateForeground = IOSAppStateKey.String("foreground") + // The app is about to terminate. Associated with UIKit notification + // `applicationWillTerminate`. + // + // Stability: development + IOSAppStateTerminate = IOSAppStateKey.String("terminate") +) + +// Namespace: k8s +const ( + // K8SClusterNameKey is the attribute Key conforming to the "k8s.cluster.name" + // semantic conventions. It represents the name of the cluster. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-cluster" + K8SClusterNameKey = attribute.Key("k8s.cluster.name") + + // K8SClusterUIDKey is the attribute Key conforming to the "k8s.cluster.uid" + // semantic conventions. It represents a pseudo-ID for the cluster, set to the + // UID of the `kube-system` namespace. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: K8s doesn't have support for obtaining a cluster ID. If this is ever + // added, we will recommend collecting the `k8s.cluster.uid` through the + // official APIs. In the meantime, we are able to use the `uid` of the + // `kube-system` namespace as a proxy for cluster ID. Read on for the + // rationale. + // + // Every object created in a K8s cluster is assigned a distinct UID. The + // `kube-system` namespace is used by Kubernetes itself and will exist + // for the lifetime of the cluster. Using the `uid` of the `kube-system` + // namespace is a reasonable proxy for the K8s ClusterID as it will only + // change if the cluster is rebuilt. Furthermore, Kubernetes UIDs are + // UUIDs as standardized by + // [ISO/IEC 9834-8 and ITU-T X.667]. + // Which states: + // + // > If generated according to one of the mechanisms defined in Rec. + // > ITU-T X.667 | ISO/IEC 9834-8, a UUID is either guaranteed to be + // > different from all other UUIDs generated before 3603 A.D., or is + // > extremely likely to be different (depending on the mechanism chosen). + // + // Therefore, UIDs between clusters should be extremely unlikely to + // conflict. + // + // [ISO/IEC 9834-8 and ITU-T X.667]: https://www.itu.int/ITU-T/studygroups/com17/oid.html + K8SClusterUIDKey = attribute.Key("k8s.cluster.uid") + + // K8SContainerNameKey is the attribute Key conforming to the + // "k8s.container.name" semantic conventions. It represents the name of the + // Container from Pod specification, must be unique within a Pod. 
Container
+ // runtime usually uses a different, globally unique name (`container.name`).
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "redis"
+ K8SContainerNameKey = attribute.Key("k8s.container.name")
+
+ // K8SContainerRestartCountKey is the attribute Key conforming to the
+ // "k8s.container.restart_count" semantic conventions. It represents the number
+ // of times the container was restarted. This attribute can be used to identify
+ // a particular container (running or stopped) within a container spec.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ K8SContainerRestartCountKey = attribute.Key("k8s.container.restart_count")
+
+ // K8SContainerStatusLastTerminatedReasonKey is the attribute Key conforming to
+ // the "k8s.container.status.last_terminated_reason" semantic conventions. It
+ // represents the last terminated reason of the Container.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "Evicted", "Error"
+ K8SContainerStatusLastTerminatedReasonKey = attribute.Key("k8s.container.status.last_terminated_reason")
+
+ // K8SCronJobNameKey is the attribute Key conforming to the "k8s.cronjob.name"
+ // semantic conventions. It represents the name of the CronJob.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "opentelemetry"
+ K8SCronJobNameKey = attribute.Key("k8s.cronjob.name")
+
+ // K8SCronJobUIDKey is the attribute Key conforming to the "k8s.cronjob.uid"
+ // semantic conventions. It represents the UID of the CronJob.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff"
+ K8SCronJobUIDKey = attribute.Key("k8s.cronjob.uid")
+
+ // K8SDaemonSetNameKey is the attribute Key conforming to the
+ // "k8s.daemonset.name" semantic conventions. It represents the name of the
+ // DaemonSet.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "opentelemetry"
+ K8SDaemonSetNameKey = attribute.Key("k8s.daemonset.name")
+
+ // K8SDaemonSetUIDKey is the attribute Key conforming to the "k8s.daemonset.uid"
+ // semantic conventions. It represents the UID of the DaemonSet.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff"
+ K8SDaemonSetUIDKey = attribute.Key("k8s.daemonset.uid")
+
+ // K8SDeploymentNameKey is the attribute Key conforming to the
+ // "k8s.deployment.name" semantic conventions. It represents the name of the
+ // Deployment.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "opentelemetry"
+ K8SDeploymentNameKey = attribute.Key("k8s.deployment.name")
+
+ // K8SDeploymentUIDKey is the attribute Key conforming to the
+ // "k8s.deployment.uid" semantic conventions. It represents the UID of the
+ // Deployment.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff"
+ K8SDeploymentUIDKey = attribute.Key("k8s.deployment.uid")
+
+ // K8SHPANameKey is the attribute Key conforming to the "k8s.hpa.name" semantic
+ // conventions. It represents the name of the horizontal pod autoscaler.
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SHPANameKey = attribute.Key("k8s.hpa.name") + + // K8SHPAUIDKey is the attribute Key conforming to the "k8s.hpa.uid" semantic + // conventions. It represents the UID of the horizontal pod autoscaler. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SHPAUIDKey = attribute.Key("k8s.hpa.uid") + + // K8SJobNameKey is the attribute Key conforming to the "k8s.job.name" semantic + // conventions. It represents the name of the Job. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SJobNameKey = attribute.Key("k8s.job.name") + + // K8SJobUIDKey is the attribute Key conforming to the "k8s.job.uid" semantic + // conventions. It represents the UID of the Job. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SJobUIDKey = attribute.Key("k8s.job.uid") + + // K8SNamespaceNameKey is the attribute Key conforming to the + // "k8s.namespace.name" semantic conventions. It represents the name of the + // namespace that the pod is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "default" + K8SNamespaceNameKey = attribute.Key("k8s.namespace.name") + + // K8SNamespacePhaseKey is the attribute Key conforming to the + // "k8s.namespace.phase" semantic conventions. It represents the phase of the + // K8s namespace. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "active", "terminating" + // Note: This attribute aligns with the `phase` field of the + // [K8s NamespaceStatus] + // + // [K8s NamespaceStatus]: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#namespacestatus-v1-core + K8SNamespacePhaseKey = attribute.Key("k8s.namespace.phase") + + // K8SNodeNameKey is the attribute Key conforming to the "k8s.node.name" + // semantic conventions. It represents the name of the Node. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "node-1" + K8SNodeNameKey = attribute.Key("k8s.node.name") + + // K8SNodeUIDKey is the attribute Key conforming to the "k8s.node.uid" semantic + // conventions. It represents the UID of the Node. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1eb3a0c6-0477-4080-a9cb-0cb7db65c6a2" + K8SNodeUIDKey = attribute.Key("k8s.node.uid") + + // K8SPodNameKey is the attribute Key conforming to the "k8s.pod.name" semantic + // conventions. It represents the name of the Pod. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-pod-autoconf" + K8SPodNameKey = attribute.Key("k8s.pod.name") + + // K8SPodUIDKey is the attribute Key conforming to the "k8s.pod.uid" semantic + // conventions. It represents the UID of the Pod. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SPodUIDKey = attribute.Key("k8s.pod.uid") + + // K8SReplicaSetNameKey is the attribute Key conforming to the + // "k8s.replicaset.name" semantic conventions. It represents the name of the + // ReplicaSet. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SReplicaSetNameKey = attribute.Key("k8s.replicaset.name") + + // K8SReplicaSetUIDKey is the attribute Key conforming to the + // "k8s.replicaset.uid" semantic conventions. It represents the UID of the + // ReplicaSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SReplicaSetUIDKey = attribute.Key("k8s.replicaset.uid") + + // K8SReplicationControllerNameKey is the attribute Key conforming to the + // "k8s.replicationcontroller.name" semantic conventions. It represents the name + // of the replication controller. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SReplicationControllerNameKey = attribute.Key("k8s.replicationcontroller.name") + + // K8SReplicationControllerUIDKey is the attribute Key conforming to the + // "k8s.replicationcontroller.uid" semantic conventions. It represents the UID + // of the replication controller. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SReplicationControllerUIDKey = attribute.Key("k8s.replicationcontroller.uid") + + // K8SResourceQuotaNameKey is the attribute Key conforming to the + // "k8s.resourcequota.name" semantic conventions. It represents the name of the + // resource quota. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SResourceQuotaNameKey = attribute.Key("k8s.resourcequota.name") + + // K8SResourceQuotaUIDKey is the attribute Key conforming to the + // "k8s.resourcequota.uid" semantic conventions. It represents the UID of the + // resource quota. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SResourceQuotaUIDKey = attribute.Key("k8s.resourcequota.uid") + + // K8SStatefulSetNameKey is the attribute Key conforming to the + // "k8s.statefulset.name" semantic conventions. It represents the name of the + // StatefulSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SStatefulSetNameKey = attribute.Key("k8s.statefulset.name") + + // K8SStatefulSetUIDKey is the attribute Key conforming to the + // "k8s.statefulset.uid" semantic conventions. It represents the UID of the + // StatefulSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SStatefulSetUIDKey = attribute.Key("k8s.statefulset.uid") + + // K8SVolumeNameKey is the attribute Key conforming to the "k8s.volume.name" + // semantic conventions. It represents the name of the K8s volume. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "volume0" + K8SVolumeNameKey = attribute.Key("k8s.volume.name") + + // K8SVolumeTypeKey is the attribute Key conforming to the "k8s.volume.type" + // semantic conventions. It represents the type of the K8s volume. 
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "emptyDir", "persistentVolumeClaim"
+ K8SVolumeTypeKey = attribute.Key("k8s.volume.type")
+)
+
+// K8SClusterName returns an attribute KeyValue conforming to the
+// "k8s.cluster.name" semantic conventions. It represents the name of the
+// cluster.
+func K8SClusterName(val string) attribute.KeyValue {
+ return K8SClusterNameKey.String(val)
+}
+
+// K8SClusterUID returns an attribute KeyValue conforming to the
+// "k8s.cluster.uid" semantic conventions. It represents a pseudo-ID for the
+// cluster, set to the UID of the `kube-system` namespace.
+func K8SClusterUID(val string) attribute.KeyValue {
+ return K8SClusterUIDKey.String(val)
+}
+
+// K8SContainerName returns an attribute KeyValue conforming to the
+// "k8s.container.name" semantic conventions. It represents the name of the
+// Container from Pod specification, must be unique within a Pod. The container
+// runtime usually uses a different, globally unique name (`container.name`).
+func K8SContainerName(val string) attribute.KeyValue {
+ return K8SContainerNameKey.String(val)
+}
+
+// K8SContainerRestartCount returns an attribute KeyValue conforming to the
+// "k8s.container.restart_count" semantic conventions. It represents the number
+// of times the container was restarted. This attribute can be used to identify a
+// particular container (running or stopped) within a container spec.
+func K8SContainerRestartCount(val int) attribute.KeyValue {
+ return K8SContainerRestartCountKey.Int(val)
+}
+
+// K8SContainerStatusLastTerminatedReason returns an attribute KeyValue
+// conforming to the "k8s.container.status.last_terminated_reason" semantic
+// conventions. It represents the last terminated reason of the Container.
+func K8SContainerStatusLastTerminatedReason(val string) attribute.KeyValue {
+ return K8SContainerStatusLastTerminatedReasonKey.String(val)
+}
+
+// K8SCronJobName returns an attribute KeyValue conforming to the
+// "k8s.cronjob.name" semantic conventions. It represents the name of the
+// CronJob.
+func K8SCronJobName(val string) attribute.KeyValue {
+ return K8SCronJobNameKey.String(val)
+}
+
+// K8SCronJobUID returns an attribute KeyValue conforming to the
+// "k8s.cronjob.uid" semantic conventions. It represents the UID of the CronJob.
+func K8SCronJobUID(val string) attribute.KeyValue {
+ return K8SCronJobUIDKey.String(val)
+}
+
+// K8SDaemonSetName returns an attribute KeyValue conforming to the
+// "k8s.daemonset.name" semantic conventions. It represents the name of the
+// DaemonSet.
+func K8SDaemonSetName(val string) attribute.KeyValue {
+ return K8SDaemonSetNameKey.String(val)
+}
+
+// K8SDaemonSetUID returns an attribute KeyValue conforming to the
+// "k8s.daemonset.uid" semantic conventions. It represents the UID of the
+// DaemonSet.
+func K8SDaemonSetUID(val string) attribute.KeyValue {
+ return K8SDaemonSetUIDKey.String(val)
+}
+
+// K8SDeploymentName returns an attribute KeyValue conforming to the
+// "k8s.deployment.name" semantic conventions. It represents the name of the
+// Deployment.
+func K8SDeploymentName(val string) attribute.KeyValue {
+ return K8SDeploymentNameKey.String(val)
+}
+
+// K8SDeploymentUID returns an attribute KeyValue conforming to the
+// "k8s.deployment.uid" semantic conventions. It represents the UID of the
+// Deployment.
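+//
+// A hedged usage sketch (editor's addition; the values are taken from the
+// Examples above and the semconv import alias is an assumption): Kubernetes
+// attributes are usually set together when describing the telemetry-producing
+// entity.
+//
+//	attrs := []attribute.KeyValue{
+//		semconv.K8SNamespaceName("default"),
+//		semconv.K8SPodName("opentelemetry-pod-autoconf"),
+//		semconv.K8SDeploymentUID("275ecb36-5aa8-4c2a-9c47-d8bb681b9aff"),
+//	}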
+func K8SDeploymentUID(val string) attribute.KeyValue { + return K8SDeploymentUIDKey.String(val) +} + +// K8SHPAName returns an attribute KeyValue conforming to the "k8s.hpa.name" +// semantic conventions. It represents the name of the horizontal pod autoscaler. +func K8SHPAName(val string) attribute.KeyValue { + return K8SHPANameKey.String(val) +} + +// K8SHPAUID returns an attribute KeyValue conforming to the "k8s.hpa.uid" +// semantic conventions. It represents the UID of the horizontal pod autoscaler. +func K8SHPAUID(val string) attribute.KeyValue { + return K8SHPAUIDKey.String(val) +} + +// K8SJobName returns an attribute KeyValue conforming to the "k8s.job.name" +// semantic conventions. It represents the name of the Job. +func K8SJobName(val string) attribute.KeyValue { + return K8SJobNameKey.String(val) +} + +// K8SJobUID returns an attribute KeyValue conforming to the "k8s.job.uid" +// semantic conventions. It represents the UID of the Job. +func K8SJobUID(val string) attribute.KeyValue { + return K8SJobUIDKey.String(val) +} + +// K8SNamespaceName returns an attribute KeyValue conforming to the +// "k8s.namespace.name" semantic conventions. It represents the name of the +// namespace that the pod is running in. +func K8SNamespaceName(val string) attribute.KeyValue { + return K8SNamespaceNameKey.String(val) +} + +// K8SNodeName returns an attribute KeyValue conforming to the "k8s.node.name" +// semantic conventions. It represents the name of the Node. +func K8SNodeName(val string) attribute.KeyValue { + return K8SNodeNameKey.String(val) +} + +// K8SNodeUID returns an attribute KeyValue conforming to the "k8s.node.uid" +// semantic conventions. It represents the UID of the Node. +func K8SNodeUID(val string) attribute.KeyValue { + return K8SNodeUIDKey.String(val) +} + +// K8SPodName returns an attribute KeyValue conforming to the "k8s.pod.name" +// semantic conventions. It represents the name of the Pod. +func K8SPodName(val string) attribute.KeyValue { + return K8SPodNameKey.String(val) +} + +// K8SPodUID returns an attribute KeyValue conforming to the "k8s.pod.uid" +// semantic conventions. It represents the UID of the Pod. +func K8SPodUID(val string) attribute.KeyValue { + return K8SPodUIDKey.String(val) +} + +// K8SReplicaSetName returns an attribute KeyValue conforming to the +// "k8s.replicaset.name" semantic conventions. It represents the name of the +// ReplicaSet. +func K8SReplicaSetName(val string) attribute.KeyValue { + return K8SReplicaSetNameKey.String(val) +} + +// K8SReplicaSetUID returns an attribute KeyValue conforming to the +// "k8s.replicaset.uid" semantic conventions. It represents the UID of the +// ReplicaSet. +func K8SReplicaSetUID(val string) attribute.KeyValue { + return K8SReplicaSetUIDKey.String(val) +} + +// K8SReplicationControllerName returns an attribute KeyValue conforming to the +// "k8s.replicationcontroller.name" semantic conventions. It represents the name +// of the replication controller. +func K8SReplicationControllerName(val string) attribute.KeyValue { + return K8SReplicationControllerNameKey.String(val) +} + +// K8SReplicationControllerUID returns an attribute KeyValue conforming to the +// "k8s.replicationcontroller.uid" semantic conventions. It represents the UID of +// the replication controller. 
+func K8SReplicationControllerUID(val string) attribute.KeyValue { + return K8SReplicationControllerUIDKey.String(val) +} + +// K8SResourceQuotaName returns an attribute KeyValue conforming to the +// "k8s.resourcequota.name" semantic conventions. It represents the name of the +// resource quota. +func K8SResourceQuotaName(val string) attribute.KeyValue { + return K8SResourceQuotaNameKey.String(val) +} + +// K8SResourceQuotaUID returns an attribute KeyValue conforming to the +// "k8s.resourcequota.uid" semantic conventions. It represents the UID of the +// resource quota. +func K8SResourceQuotaUID(val string) attribute.KeyValue { + return K8SResourceQuotaUIDKey.String(val) +} + +// K8SStatefulSetName returns an attribute KeyValue conforming to the +// "k8s.statefulset.name" semantic conventions. It represents the name of the +// StatefulSet. +func K8SStatefulSetName(val string) attribute.KeyValue { + return K8SStatefulSetNameKey.String(val) +} + +// K8SStatefulSetUID returns an attribute KeyValue conforming to the +// "k8s.statefulset.uid" semantic conventions. It represents the UID of the +// StatefulSet. +func K8SStatefulSetUID(val string) attribute.KeyValue { + return K8SStatefulSetUIDKey.String(val) +} + +// K8SVolumeName returns an attribute KeyValue conforming to the +// "k8s.volume.name" semantic conventions. It represents the name of the K8s +// volume. +func K8SVolumeName(val string) attribute.KeyValue { + return K8SVolumeNameKey.String(val) +} + +// Enum values for k8s.namespace.phase +var ( + // Active namespace phase as described by [K8s API] + // Stability: development + // + // [K8s API]: https://pkg.go.dev/k8s.io/api@v0.31.3/core/v1#NamespacePhase + K8SNamespacePhaseActive = K8SNamespacePhaseKey.String("active") + // Terminating namespace phase as described by [K8s API] + // Stability: development + // + // [K8s API]: https://pkg.go.dev/k8s.io/api@v0.31.3/core/v1#NamespacePhase + K8SNamespacePhaseTerminating = K8SNamespacePhaseKey.String("terminating") +) + +// Enum values for k8s.volume.type +var ( + // A [persistentVolumeClaim] volume + // Stability: development + // + // [persistentVolumeClaim]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#persistentvolumeclaim + K8SVolumeTypePersistentVolumeClaim = K8SVolumeTypeKey.String("persistentVolumeClaim") + // A [configMap] volume + // Stability: development + // + // [configMap]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#configmap + K8SVolumeTypeConfigMap = K8SVolumeTypeKey.String("configMap") + // A [downwardAPI] volume + // Stability: development + // + // [downwardAPI]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#downwardapi + K8SVolumeTypeDownwardAPI = K8SVolumeTypeKey.String("downwardAPI") + // An [emptyDir] volume + // Stability: development + // + // [emptyDir]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#emptydir + K8SVolumeTypeEmptyDir = K8SVolumeTypeKey.String("emptyDir") + // A [secret] volume + // Stability: development + // + // [secret]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#secret + K8SVolumeTypeSecret = K8SVolumeTypeKey.String("secret") + // A [local] volume + // Stability: development + // + // [local]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#local + K8SVolumeTypeLocal = K8SVolumeTypeKey.String("local") +) + +// Namespace: linux +const ( + // LinuxMemorySlabStateKey is the attribute Key conforming to the + // "linux.memory.slab.state" semantic conventions. 
It represents the Linux Slab + // memory state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "reclaimable", "unreclaimable" + LinuxMemorySlabStateKey = attribute.Key("linux.memory.slab.state") +) + +// Enum values for linux.memory.slab.state +var ( + // reclaimable + // Stability: development + LinuxMemorySlabStateReclaimable = LinuxMemorySlabStateKey.String("reclaimable") + // unreclaimable + // Stability: development + LinuxMemorySlabStateUnreclaimable = LinuxMemorySlabStateKey.String("unreclaimable") +) + +// Namespace: log +const ( + // LogFileNameKey is the attribute Key conforming to the "log.file.name" + // semantic conventions. It represents the basename of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "audit.log" + LogFileNameKey = attribute.Key("log.file.name") + + // LogFileNameResolvedKey is the attribute Key conforming to the + // "log.file.name_resolved" semantic conventions. It represents the basename of + // the file, with symlinks resolved. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "uuid.log" + LogFileNameResolvedKey = attribute.Key("log.file.name_resolved") + + // LogFilePathKey is the attribute Key conforming to the "log.file.path" + // semantic conventions. It represents the full path to the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/var/log/mysql/audit.log" + LogFilePathKey = attribute.Key("log.file.path") + + // LogFilePathResolvedKey is the attribute Key conforming to the + // "log.file.path_resolved" semantic conventions. It represents the full path to + // the file, with symlinks resolved. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/var/lib/docker/uuid.log" + LogFilePathResolvedKey = attribute.Key("log.file.path_resolved") + + // LogIostreamKey is the attribute Key conforming to the "log.iostream" semantic + // conventions. It represents the stream associated with the log. See below for + // a list of well-known values. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + LogIostreamKey = attribute.Key("log.iostream") + + // LogRecordOriginalKey is the attribute Key conforming to the + // "log.record.original" semantic conventions. It represents the complete + // original Log Record. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "77 <86>1 2015-08-06T21:58:59.694Z 192.168.2.133 inactive - - - + // Something happened", "[INFO] 8/3/24 12:34:56 Something happened" + // Note: This value MAY be added when processing a Log Record which was + // originally transmitted as a string or equivalent data type AND the Body field + // of the Log Record does not contain the same value. (e.g. a syslog or a log + // record read from a file.) + LogRecordOriginalKey = attribute.Key("log.record.original") + + // LogRecordUIDKey is the attribute Key conforming to the "log.record.uid" + // semantic conventions. It represents a unique identifier for the Log Record. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "01ARZ3NDEKTSV4RRFFQ69G5FAV" + // Note: If an id is provided, other log records with the same id will be + // considered duplicates and can be removed safely. 
This means that two
+ // distinguishable log records MUST have different values.
+ // The id MAY be a
+ // [Universally Unique Lexicographically Sortable Identifier (ULID)], but other
+ // identifiers (e.g. UUID) may be used as needed.
+ //
+ // [Universally Unique Lexicographically Sortable Identifier (ULID)]: https://github.com/ulid/spec
+ LogRecordUIDKey = attribute.Key("log.record.uid")
+)
+
+// LogFileName returns an attribute KeyValue conforming to the "log.file.name"
+// semantic conventions. It represents the basename of the file.
+func LogFileName(val string) attribute.KeyValue {
+ return LogFileNameKey.String(val)
+}
+
+// LogFileNameResolved returns an attribute KeyValue conforming to the
+// "log.file.name_resolved" semantic conventions. It represents the basename of
+// the file, with symlinks resolved.
+func LogFileNameResolved(val string) attribute.KeyValue {
+ return LogFileNameResolvedKey.String(val)
+}
+
+// LogFilePath returns an attribute KeyValue conforming to the "log.file.path"
+// semantic conventions. It represents the full path to the file.
+func LogFilePath(val string) attribute.KeyValue {
+ return LogFilePathKey.String(val)
+}
+
+// LogFilePathResolved returns an attribute KeyValue conforming to the
+// "log.file.path_resolved" semantic conventions. It represents the full path to
+// the file, with symlinks resolved.
+func LogFilePathResolved(val string) attribute.KeyValue {
+ return LogFilePathResolvedKey.String(val)
+}
+
+// LogRecordOriginal returns an attribute KeyValue conforming to the
+// "log.record.original" semantic conventions. It represents the complete
+// original Log Record.
+func LogRecordOriginal(val string) attribute.KeyValue {
+ return LogRecordOriginalKey.String(val)
+}
+
+// LogRecordUID returns an attribute KeyValue conforming to the "log.record.uid"
+// semantic conventions. It represents a unique identifier for the Log Record.
+func LogRecordUID(val string) attribute.KeyValue {
+ return LogRecordUIDKey.String(val)
+}
+
+// Enum values for log.iostream
+var (
+ // Logs from stdout stream
+ // Stability: development
+ LogIostreamStdout = LogIostreamKey.String("stdout")
+ // Logs from stderr stream
+ // Stability: development
+ LogIostreamStderr = LogIostreamKey.String("stderr")
+)
+
+// Namespace: messaging
+const (
+ // MessagingBatchMessageCountKey is the attribute Key conforming to the
+ // "messaging.batch.message_count" semantic conventions. It represents the
+ // number of messages sent, received, or processed in the scope of the batching
+ // operation.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 0, 1, 2
+ // Note: Instrumentations SHOULD NOT set `messaging.batch.message_count` on
+ // spans that operate with a single message. When a messaging client library
+ // supports both batch and single-message API for the same operation,
+ // instrumentations SHOULD use `messaging.batch.message_count` for batching APIs
+ // and SHOULD NOT use it for single-message APIs.
+ MessagingBatchMessageCountKey = attribute.Key("messaging.batch.message_count")
+
+ // MessagingClientIDKey is the attribute Key conforming to the
+ // "messaging.client.id" semantic conventions. It represents a unique identifier
+ // for the client that consumes or produces a message.
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "client-5", "myhost@8742@s8083jm" + MessagingClientIDKey = attribute.Key("messaging.client.id") + + // MessagingConsumerGroupNameKey is the attribute Key conforming to the + // "messaging.consumer.group.name" semantic conventions. It represents the name + // of the consumer group with which a consumer is associated. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-group", "indexer" + // Note: Semantic conventions for individual messaging systems SHOULD document + // whether `messaging.consumer.group.name` is applicable and what it means in + // the context of that system. + MessagingConsumerGroupNameKey = attribute.Key("messaging.consumer.group.name") + + // MessagingDestinationAnonymousKey is the attribute Key conforming to the + // "messaging.destination.anonymous" semantic conventions. It represents a + // boolean that is true if the message destination is anonymous (could be + // unnamed or have auto-generated name). + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingDestinationAnonymousKey = attribute.Key("messaging.destination.anonymous") + + // MessagingDestinationNameKey is the attribute Key conforming to the + // "messaging.destination.name" semantic conventions. It represents the message + // destination name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MyQueue", "MyTopic" + // Note: Destination name SHOULD uniquely identify a specific queue, topic or + // other entity within the broker. If + // the broker doesn't have such notion, the destination name SHOULD uniquely + // identify the broker. + MessagingDestinationNameKey = attribute.Key("messaging.destination.name") + + // MessagingDestinationPartitionIDKey is the attribute Key conforming to the + // "messaging.destination.partition.id" semantic conventions. It represents the + // identifier of the partition messages are sent to or received from, unique + // within the `messaging.destination.name`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + MessagingDestinationPartitionIDKey = attribute.Key("messaging.destination.partition.id") + + // MessagingDestinationSubscriptionNameKey is the attribute Key conforming to + // the "messaging.destination.subscription.name" semantic conventions. It + // represents the name of the destination subscription from which a message is + // consumed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "subscription-a" + // Note: Semantic conventions for individual messaging systems SHOULD document + // whether `messaging.destination.subscription.name` is applicable and what it + // means in the context of that system. + MessagingDestinationSubscriptionNameKey = attribute.Key("messaging.destination.subscription.name") + + // MessagingDestinationTemplateKey is the attribute Key conforming to the + // "messaging.destination.template" semantic conventions. It represents the low + // cardinality representation of the messaging destination name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/customers/{customerId}" + // Note: Destination names could be constructed from templates. 
An example would + // be a destination name involving a user name or product id. Although the + // destination name in this case is of high cardinality, the underlying template + // is of low cardinality and can be effectively used for grouping and + // aggregation. + MessagingDestinationTemplateKey = attribute.Key("messaging.destination.template") + + // MessagingDestinationTemporaryKey is the attribute Key conforming to the + // "messaging.destination.temporary" semantic conventions. It represents a + // boolean that is true if the message destination is temporary and might not + // exist anymore after messages are processed. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingDestinationTemporaryKey = attribute.Key("messaging.destination.temporary") + + // MessagingEventHubsMessageEnqueuedTimeKey is the attribute Key conforming to + // the "messaging.eventhubs.message.enqueued_time" semantic conventions. It + // represents the UTC epoch seconds at which the message has been accepted and + // stored in the entity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingEventHubsMessageEnqueuedTimeKey = attribute.Key("messaging.eventhubs.message.enqueued_time") + + // MessagingGCPPubSubMessageAckDeadlineKey is the attribute Key conforming to + // the "messaging.gcp_pubsub.message.ack_deadline" semantic conventions. It + // represents the ack deadline in seconds set for the modify ack deadline + // request. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingGCPPubSubMessageAckDeadlineKey = attribute.Key("messaging.gcp_pubsub.message.ack_deadline") + + // MessagingGCPPubSubMessageAckIDKey is the attribute Key conforming to the + // "messaging.gcp_pubsub.message.ack_id" semantic conventions. It represents the + // ack id for a given message. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: ack_id + MessagingGCPPubSubMessageAckIDKey = attribute.Key("messaging.gcp_pubsub.message.ack_id") + + // MessagingGCPPubSubMessageDeliveryAttemptKey is the attribute Key conforming + // to the "messaging.gcp_pubsub.message.delivery_attempt" semantic conventions. + // It represents the delivery attempt for a given message. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingGCPPubSubMessageDeliveryAttemptKey = attribute.Key("messaging.gcp_pubsub.message.delivery_attempt") + + // MessagingGCPPubSubMessageOrderingKeyKey is the attribute Key conforming to + // the "messaging.gcp_pubsub.message.ordering_key" semantic conventions. It + // represents the ordering key for a given message. If the attribute is not + // present, the message does not have an ordering key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: ordering_key + MessagingGCPPubSubMessageOrderingKeyKey = attribute.Key("messaging.gcp_pubsub.message.ordering_key") + + // MessagingKafkaMessageKeyKey is the attribute Key conforming to the + // "messaging.kafka.message.key" semantic conventions. It represents the message + // keys in Kafka are used for grouping alike messages to ensure they're + // processed on the same partition. They differ from `messaging.message.id` in + // that they're not unique. If the key is `null`, the attribute MUST NOT be set. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myKey + // Note: If the key type is not string, it's string representation has to be + // supplied for the attribute. If the key has no unambiguous, canonical string + // form, don't include its value. + MessagingKafkaMessageKeyKey = attribute.Key("messaging.kafka.message.key") + + // MessagingKafkaMessageTombstoneKey is the attribute Key conforming to the + // "messaging.kafka.message.tombstone" semantic conventions. It represents a + // boolean that is true if the message is a tombstone. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingKafkaMessageTombstoneKey = attribute.Key("messaging.kafka.message.tombstone") + + // MessagingKafkaOffsetKey is the attribute Key conforming to the + // "messaging.kafka.offset" semantic conventions. It represents the offset of a + // record in the corresponding Kafka partition. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingKafkaOffsetKey = attribute.Key("messaging.kafka.offset") + + // MessagingMessageBodySizeKey is the attribute Key conforming to the + // "messaging.message.body.size" semantic conventions. It represents the size of + // the message body in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: This can refer to both the compressed or uncompressed body size. If + // both sizes are known, the uncompressed + // body size should be used. + MessagingMessageBodySizeKey = attribute.Key("messaging.message.body.size") + + // MessagingMessageConversationIDKey is the attribute Key conforming to the + // "messaging.message.conversation_id" semantic conventions. It represents the + // conversation ID identifying the conversation to which the message belongs, + // represented as a string. Sometimes called "Correlation ID". + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: MyConversationId + MessagingMessageConversationIDKey = attribute.Key("messaging.message.conversation_id") + + // MessagingMessageEnvelopeSizeKey is the attribute Key conforming to the + // "messaging.message.envelope.size" semantic conventions. It represents the + // size of the message body and metadata in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: This can refer to both the compressed or uncompressed size. If both + // sizes are known, the uncompressed + // size should be used. + MessagingMessageEnvelopeSizeKey = attribute.Key("messaging.message.envelope.size") + + // MessagingMessageIDKey is the attribute Key conforming to the + // "messaging.message.id" semantic conventions. It represents a value used by + // the messaging system as an identifier for the message, represented as a + // string. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 452a7c7c7c7048c2f887f61572b18fc2 + MessagingMessageIDKey = attribute.Key("messaging.message.id") + + // MessagingOperationNameKey is the attribute Key conforming to the + // "messaging.operation.name" semantic conventions. It represents the + // system-specific name of the messaging operation. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ack", "nack", "send" + MessagingOperationNameKey = attribute.Key("messaging.operation.name") + + // MessagingOperationTypeKey is the attribute Key conforming to the + // "messaging.operation.type" semantic conventions. It represents a string + // identifying the type of the messaging operation. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: If a custom value is used, it MUST be of low cardinality. + MessagingOperationTypeKey = attribute.Key("messaging.operation.type") + + // MessagingRabbitMQDestinationRoutingKeyKey is the attribute Key conforming to + // the "messaging.rabbitmq.destination.routing_key" semantic conventions. It + // represents the rabbitMQ message routing key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myKey + MessagingRabbitMQDestinationRoutingKeyKey = attribute.Key("messaging.rabbitmq.destination.routing_key") + + // MessagingRabbitMQMessageDeliveryTagKey is the attribute Key conforming to the + // "messaging.rabbitmq.message.delivery_tag" semantic conventions. It represents + // the rabbitMQ message delivery tag. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRabbitMQMessageDeliveryTagKey = attribute.Key("messaging.rabbitmq.message.delivery_tag") + + // MessagingRocketMQConsumptionModelKey is the attribute Key conforming to the + // "messaging.rocketmq.consumption_model" semantic conventions. It represents + // the model of message consumption. This only applies to consumer spans. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingRocketMQConsumptionModelKey = attribute.Key("messaging.rocketmq.consumption_model") + + // MessagingRocketMQMessageDelayTimeLevelKey is the attribute Key conforming to + // the "messaging.rocketmq.message.delay_time_level" semantic conventions. It + // represents the delay time level for delay message, which determines the + // message delay time. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRocketMQMessageDelayTimeLevelKey = attribute.Key("messaging.rocketmq.message.delay_time_level") + + // MessagingRocketMQMessageDeliveryTimestampKey is the attribute Key conforming + // to the "messaging.rocketmq.message.delivery_timestamp" semantic conventions. + // It represents the timestamp in milliseconds that the delay message is + // expected to be delivered to consumer. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRocketMQMessageDeliveryTimestampKey = attribute.Key("messaging.rocketmq.message.delivery_timestamp") + + // MessagingRocketMQMessageGroupKey is the attribute Key conforming to the + // "messaging.rocketmq.message.group" semantic conventions. It represents the it + // is essential for FIFO message. Messages that belong to the same message group + // are always processed one by one within the same consumer group. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myMessageGroup + MessagingRocketMQMessageGroupKey = attribute.Key("messaging.rocketmq.message.group") + + // MessagingRocketMQMessageKeysKey is the attribute Key conforming to the + // "messaging.rocketmq.message.keys" semantic conventions. 
+ // key(s) of the message, another way to mark a message besides the message
+ // ID.
+ //
+ // Type: string[]
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "keyA", "keyB"
+ MessagingRocketMQMessageKeysKey = attribute.Key("messaging.rocketmq.message.keys")
+
+ // MessagingRocketMQMessageTagKey is the attribute Key conforming to the
+ // "messaging.rocketmq.message.tag" semantic conventions. It represents the
+ // secondary classifier of message besides topic.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: tagA
+ MessagingRocketMQMessageTagKey = attribute.Key("messaging.rocketmq.message.tag")
+
+ // MessagingRocketMQMessageTypeKey is the attribute Key conforming to the
+ // "messaging.rocketmq.message.type" semantic conventions. It represents the
+ // type of message.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ MessagingRocketMQMessageTypeKey = attribute.Key("messaging.rocketmq.message.type")
+
+ // MessagingRocketMQNamespaceKey is the attribute Key conforming to the
+ // "messaging.rocketmq.namespace" semantic conventions. It represents the
+ // namespace of RocketMQ resources; resources in different namespaces are
+ // individual.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: myNamespace
+ MessagingRocketMQNamespaceKey = attribute.Key("messaging.rocketmq.namespace")
+
+ // MessagingServiceBusDispositionStatusKey is the attribute Key conforming to
+ // the "messaging.servicebus.disposition_status" semantic conventions. It
+ // describes the [settlement type].
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ //
+ // [settlement type]: https://learn.microsoft.com/azure/service-bus-messaging/message-transfers-locks-settlement#peeklock
+ MessagingServiceBusDispositionStatusKey = attribute.Key("messaging.servicebus.disposition_status")
+
+ // MessagingServiceBusMessageDeliveryCountKey is the attribute Key conforming to
+ // the "messaging.servicebus.message.delivery_count" semantic conventions. It
+ // represents the number of deliveries that have been attempted for this
+ // message.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ MessagingServiceBusMessageDeliveryCountKey = attribute.Key("messaging.servicebus.message.delivery_count")
+
+ // MessagingServiceBusMessageEnqueuedTimeKey is the attribute Key conforming to
+ // the "messaging.servicebus.message.enqueued_time" semantic conventions. It
+ // represents the UTC epoch seconds at which the message has been accepted and
+ // stored in the entity.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ MessagingServiceBusMessageEnqueuedTimeKey = attribute.Key("messaging.servicebus.message.enqueued_time")
+
+ // MessagingSystemKey is the attribute Key conforming to the "messaging.system"
+ // semantic conventions. It represents the messaging system as identified by the
+ // client instrumentation.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: The actual messaging system may differ from the one known by the
+ // client. For example, when using Kafka client libraries to communicate with
+ // Azure Event Hubs, the `messaging.system` is set to `kafka` based on the
+ // instrumentation's best knowledge.
+ MessagingSystemKey = attribute.Key("messaging.system") +) + +// MessagingBatchMessageCount returns an attribute KeyValue conforming to the +// "messaging.batch.message_count" semantic conventions. It represents the number +// of messages sent, received, or processed in the scope of the batching +// operation. +func MessagingBatchMessageCount(val int) attribute.KeyValue { + return MessagingBatchMessageCountKey.Int(val) +} + +// MessagingClientID returns an attribute KeyValue conforming to the +// "messaging.client.id" semantic conventions. It represents a unique identifier +// for the client that consumes or produces a message. +func MessagingClientID(val string) attribute.KeyValue { + return MessagingClientIDKey.String(val) +} + +// MessagingConsumerGroupName returns an attribute KeyValue conforming to the +// "messaging.consumer.group.name" semantic conventions. It represents the name +// of the consumer group with which a consumer is associated. +func MessagingConsumerGroupName(val string) attribute.KeyValue { + return MessagingConsumerGroupNameKey.String(val) +} + +// MessagingDestinationAnonymous returns an attribute KeyValue conforming to the +// "messaging.destination.anonymous" semantic conventions. It represents a +// boolean that is true if the message destination is anonymous (could be unnamed +// or have auto-generated name). +func MessagingDestinationAnonymous(val bool) attribute.KeyValue { + return MessagingDestinationAnonymousKey.Bool(val) +} + +// MessagingDestinationName returns an attribute KeyValue conforming to the +// "messaging.destination.name" semantic conventions. It represents the message +// destination name. +func MessagingDestinationName(val string) attribute.KeyValue { + return MessagingDestinationNameKey.String(val) +} + +// MessagingDestinationPartitionID returns an attribute KeyValue conforming to +// the "messaging.destination.partition.id" semantic conventions. It represents +// the identifier of the partition messages are sent to or received from, unique +// within the `messaging.destination.name`. +func MessagingDestinationPartitionID(val string) attribute.KeyValue { + return MessagingDestinationPartitionIDKey.String(val) +} + +// MessagingDestinationSubscriptionName returns an attribute KeyValue conforming +// to the "messaging.destination.subscription.name" semantic conventions. It +// represents the name of the destination subscription from which a message is +// consumed. +func MessagingDestinationSubscriptionName(val string) attribute.KeyValue { + return MessagingDestinationSubscriptionNameKey.String(val) +} + +// MessagingDestinationTemplate returns an attribute KeyValue conforming to the +// "messaging.destination.template" semantic conventions. It represents the low +// cardinality representation of the messaging destination name. +func MessagingDestinationTemplate(val string) attribute.KeyValue { + return MessagingDestinationTemplateKey.String(val) +} + +// MessagingDestinationTemporary returns an attribute KeyValue conforming to the +// "messaging.destination.temporary" semantic conventions. It represents a +// boolean that is true if the message destination is temporary and might not +// exist anymore after messages are processed. +func MessagingDestinationTemporary(val bool) attribute.KeyValue { + return MessagingDestinationTemporaryKey.Bool(val) +} + +// MessagingEventHubsMessageEnqueuedTime returns an attribute KeyValue conforming +// to the "messaging.eventhubs.message.enqueued_time" semantic conventions. 
It
+// represents the UTC epoch seconds at which the message has been accepted and
+// stored in the entity.
+func MessagingEventHubsMessageEnqueuedTime(val int) attribute.KeyValue {
+ return MessagingEventHubsMessageEnqueuedTimeKey.Int(val)
+}
+
+// MessagingGCPPubSubMessageAckDeadline returns an attribute KeyValue conforming
+// to the "messaging.gcp_pubsub.message.ack_deadline" semantic conventions. It
+// represents the ack deadline in seconds set for the modify ack deadline
+// request.
+func MessagingGCPPubSubMessageAckDeadline(val int) attribute.KeyValue {
+ return MessagingGCPPubSubMessageAckDeadlineKey.Int(val)
+}
+
+// MessagingGCPPubSubMessageAckID returns an attribute KeyValue conforming to the
+// "messaging.gcp_pubsub.message.ack_id" semantic conventions. It represents the
+// ack id for a given message.
+func MessagingGCPPubSubMessageAckID(val string) attribute.KeyValue {
+ return MessagingGCPPubSubMessageAckIDKey.String(val)
+}
+
+// MessagingGCPPubSubMessageDeliveryAttempt returns an attribute KeyValue
+// conforming to the "messaging.gcp_pubsub.message.delivery_attempt" semantic
+// conventions. It represents the delivery attempt for a given message.
+func MessagingGCPPubSubMessageDeliveryAttempt(val int) attribute.KeyValue {
+ return MessagingGCPPubSubMessageDeliveryAttemptKey.Int(val)
+}
+
+// MessagingGCPPubSubMessageOrderingKey returns an attribute KeyValue conforming
+// to the "messaging.gcp_pubsub.message.ordering_key" semantic conventions. It
+// represents the ordering key for a given message. If the attribute is not
+// present, the message does not have an ordering key.
+func MessagingGCPPubSubMessageOrderingKey(val string) attribute.KeyValue {
+ return MessagingGCPPubSubMessageOrderingKeyKey.String(val)
+}
+
+// MessagingKafkaMessageKey returns an attribute KeyValue conforming to the
+// "messaging.kafka.message.key" semantic conventions. It represents the message
+// key. Message keys in Kafka are used for grouping alike messages to ensure
+// they're processed on the same partition. They differ from
+// `messaging.message.id` in that they're not unique. If the key is `null`, the
+// attribute MUST NOT be set.
+func MessagingKafkaMessageKey(val string) attribute.KeyValue {
+ return MessagingKafkaMessageKeyKey.String(val)
+}
+
+// MessagingKafkaMessageTombstone returns an attribute KeyValue conforming to the
+// "messaging.kafka.message.tombstone" semantic conventions. It represents a
+// boolean that is true if the message is a tombstone.
+func MessagingKafkaMessageTombstone(val bool) attribute.KeyValue {
+ return MessagingKafkaMessageTombstoneKey.Bool(val)
+}
+
+// MessagingKafkaOffset returns an attribute KeyValue conforming to the
+// "messaging.kafka.offset" semantic conventions. It represents the offset of a
+// record in the corresponding Kafka partition.
+func MessagingKafkaOffset(val int) attribute.KeyValue {
+ return MessagingKafkaOffsetKey.Int(val)
+}
+
+// MessagingMessageBodySize returns an attribute KeyValue conforming to the
+// "messaging.message.body.size" semantic conventions. It represents the size of
+// the message body in bytes.
+func MessagingMessageBodySize(val int) attribute.KeyValue {
+ return MessagingMessageBodySizeKey.Int(val)
+}
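+
+// Example (illustrative sketch, not part of the generated file): attaching a
+// few of the messaging attributes above to a producer span. The tracer, the
+// context, and the literal values are assumptions for illustration; "trace" is
+// go.opentelemetry.io/otel/trace, and MessagingSystemKafka is the enum value
+// defined further below in this file:
+//
+//	func publishOrder(ctx context.Context, tracer trace.Tracer, payload []byte) {
+//		ctx, span := tracer.Start(ctx, "orders publish",
+//			trace.WithSpanKind(trace.SpanKindProducer),
+//			trace.WithAttributes(
+//				MessagingSystemKafka,
+//				MessagingDestinationName("orders"),
+//				MessagingKafkaMessageKey("order-1234"),
+//				MessagingMessageBodySize(len(payload)),
+//			))
+//		defer span.End()
+//		// ... hand the message to the messaging client here, using ctx ...
+//	}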
+
+// MessagingMessageConversationID returns an attribute KeyValue conforming to the
+// "messaging.message.conversation_id" semantic conventions. It represents the
+// conversation ID identifying the conversation to which the message belongs,
+// represented as a string. Sometimes called "Correlation ID".
+func MessagingMessageConversationID(val string) attribute.KeyValue {
+ return MessagingMessageConversationIDKey.String(val)
+}
+
+// MessagingMessageEnvelopeSize returns an attribute KeyValue conforming to the
+// "messaging.message.envelope.size" semantic conventions. It represents the size
+// of the message body and metadata in bytes.
+func MessagingMessageEnvelopeSize(val int) attribute.KeyValue {
+ return MessagingMessageEnvelopeSizeKey.Int(val)
+}
+
+// MessagingMessageID returns an attribute KeyValue conforming to the
+// "messaging.message.id" semantic conventions. It represents a value used by the
+// messaging system as an identifier for the message, represented as a string.
+func MessagingMessageID(val string) attribute.KeyValue {
+ return MessagingMessageIDKey.String(val)
+}
+
+// MessagingOperationName returns an attribute KeyValue conforming to the
+// "messaging.operation.name" semantic conventions. It represents the
+// system-specific name of the messaging operation.
+func MessagingOperationName(val string) attribute.KeyValue {
+ return MessagingOperationNameKey.String(val)
+}
+
+// MessagingRabbitMQDestinationRoutingKey returns an attribute KeyValue
+// conforming to the "messaging.rabbitmq.destination.routing_key" semantic
+// conventions. It represents the RabbitMQ message routing key.
+func MessagingRabbitMQDestinationRoutingKey(val string) attribute.KeyValue {
+ return MessagingRabbitMQDestinationRoutingKeyKey.String(val)
+}
+
+// MessagingRabbitMQMessageDeliveryTag returns an attribute KeyValue conforming
+// to the "messaging.rabbitmq.message.delivery_tag" semantic conventions. It
+// represents the RabbitMQ message delivery tag.
+func MessagingRabbitMQMessageDeliveryTag(val int) attribute.KeyValue {
+ return MessagingRabbitMQMessageDeliveryTagKey.Int(val)
+}
+
+// MessagingRocketMQMessageDelayTimeLevel returns an attribute KeyValue
+// conforming to the "messaging.rocketmq.message.delay_time_level" semantic
+// conventions. It represents the delay time level for delay message, which
+// determines the message delay time.
+func MessagingRocketMQMessageDelayTimeLevel(val int) attribute.KeyValue {
+ return MessagingRocketMQMessageDelayTimeLevelKey.Int(val)
+}
+
+// MessagingRocketMQMessageDeliveryTimestamp returns an attribute KeyValue
+// conforming to the "messaging.rocketmq.message.delivery_timestamp" semantic
+// conventions. It represents the timestamp in milliseconds that the delay
+// message is expected to be delivered to consumer.
+func MessagingRocketMQMessageDeliveryTimestamp(val int) attribute.KeyValue {
+ return MessagingRocketMQMessageDeliveryTimestampKey.Int(val)
+}
+
+// MessagingRocketMQMessageGroup returns an attribute KeyValue conforming to the
+// "messaging.rocketmq.message.group" semantic conventions. It represents the
+// message group, which is essential for FIFO messages. Messages that belong to
+// the same message group are always processed one by one within the same
+// consumer group.
+func MessagingRocketMQMessageGroup(val string) attribute.KeyValue {
+ return MessagingRocketMQMessageGroupKey.String(val)
+}
+
+// MessagingRocketMQMessageKeys returns an attribute KeyValue conforming to the
+// "messaging.rocketmq.message.keys" semantic conventions. It represents the
+// key(s) of the message, another way to mark a message besides the message ID.
+func MessagingRocketMQMessageKeys(val ...string) attribute.KeyValue {
+ return MessagingRocketMQMessageKeysKey.StringSlice(val)
+}
+
+// MessagingRocketMQMessageTag returns an attribute KeyValue conforming to the
+// "messaging.rocketmq.message.tag" semantic conventions. It represents the
+// secondary classifier of message besides topic.
+func MessagingRocketMQMessageTag(val string) attribute.KeyValue {
+ return MessagingRocketMQMessageTagKey.String(val)
+}
+
+// MessagingRocketMQNamespace returns an attribute KeyValue conforming to the
+// "messaging.rocketmq.namespace" semantic conventions. It represents the
+// namespace of RocketMQ resources; resources in different namespaces are
+// individual.
+func MessagingRocketMQNamespace(val string) attribute.KeyValue {
+ return MessagingRocketMQNamespaceKey.String(val)
+}
+
+// MessagingServiceBusMessageDeliveryCount returns an attribute KeyValue
+// conforming to the "messaging.servicebus.message.delivery_count" semantic
+// conventions. It represents the number of deliveries that have been attempted
+// for this message.
+func MessagingServiceBusMessageDeliveryCount(val int) attribute.KeyValue {
+ return MessagingServiceBusMessageDeliveryCountKey.Int(val)
+}
+
+// MessagingServiceBusMessageEnqueuedTime returns an attribute KeyValue
+// conforming to the "messaging.servicebus.message.enqueued_time" semantic
+// conventions. It represents the UTC epoch seconds at which the message has been
+// accepted and stored in the entity.
+func MessagingServiceBusMessageEnqueuedTime(val int) attribute.KeyValue {
+ return MessagingServiceBusMessageEnqueuedTimeKey.Int(val)
+}
+
+// Enum values for messaging.operation.type
+var (
+ // A message is created. "Create" spans always refer to a single message and are
+ // used to provide a unique creation context for messages in batch sending
+ // scenarios.
+ //
+ // Stability: development
+ MessagingOperationTypeCreate = MessagingOperationTypeKey.String("create")
+ // One or more messages are provided for sending to an intermediary. If a single
+ // message is sent, the context of the "Send" span can be used as the creation
+ // context and no "Create" span needs to be created.
+ //
+ // Stability: development
+ MessagingOperationTypeSend = MessagingOperationTypeKey.String("send")
+ // One or more messages are requested by a consumer. This operation refers to
+ // pull-based scenarios, where consumers explicitly call methods of messaging
+ // SDKs to receive messages.
+ //
+ // Stability: development
+ MessagingOperationTypeReceive = MessagingOperationTypeKey.String("receive")
+ // One or more messages are processed by a consumer.
+ //
+ // Stability: development
+ MessagingOperationTypeProcess = MessagingOperationTypeKey.String("process")
+ // One or more messages are settled.
+ //
+ // Stability: development
+ MessagingOperationTypeSettle = MessagingOperationTypeKey.String("settle")
+ // Deprecated: Replaced by `process`.
+ MessagingOperationTypeDeliver = MessagingOperationTypeKey.String("deliver")
+ // Deprecated: Replaced by `send`.
+ MessagingOperationTypePublish = MessagingOperationTypeKey.String("publish") +) + +// Enum values for messaging.rocketmq.consumption_model +var ( + // Clustering consumption model + // Stability: development + MessagingRocketMQConsumptionModelClustering = MessagingRocketMQConsumptionModelKey.String("clustering") + // Broadcasting consumption model + // Stability: development + MessagingRocketMQConsumptionModelBroadcasting = MessagingRocketMQConsumptionModelKey.String("broadcasting") +) + +// Enum values for messaging.rocketmq.message.type +var ( + // Normal message + // Stability: development + MessagingRocketMQMessageTypeNormal = MessagingRocketMQMessageTypeKey.String("normal") + // FIFO message + // Stability: development + MessagingRocketMQMessageTypeFifo = MessagingRocketMQMessageTypeKey.String("fifo") + // Delay message + // Stability: development + MessagingRocketMQMessageTypeDelay = MessagingRocketMQMessageTypeKey.String("delay") + // Transaction message + // Stability: development + MessagingRocketMQMessageTypeTransaction = MessagingRocketMQMessageTypeKey.String("transaction") +) + +// Enum values for messaging.servicebus.disposition_status +var ( + // Message is completed + // Stability: development + MessagingServiceBusDispositionStatusComplete = MessagingServiceBusDispositionStatusKey.String("complete") + // Message is abandoned + // Stability: development + MessagingServiceBusDispositionStatusAbandon = MessagingServiceBusDispositionStatusKey.String("abandon") + // Message is sent to dead letter queue + // Stability: development + MessagingServiceBusDispositionStatusDeadLetter = MessagingServiceBusDispositionStatusKey.String("dead_letter") + // Message is deferred + // Stability: development + MessagingServiceBusDispositionStatusDefer = MessagingServiceBusDispositionStatusKey.String("defer") +) + +// Enum values for messaging.system +var ( + // Apache ActiveMQ + // Stability: development + MessagingSystemActiveMQ = MessagingSystemKey.String("activemq") + // Amazon Simple Queue Service (SQS) + // Stability: development + MessagingSystemAWSSQS = MessagingSystemKey.String("aws_sqs") + // Azure Event Grid + // Stability: development + MessagingSystemEventGrid = MessagingSystemKey.String("eventgrid") + // Azure Event Hubs + // Stability: development + MessagingSystemEventHubs = MessagingSystemKey.String("eventhubs") + // Azure Service Bus + // Stability: development + MessagingSystemServiceBus = MessagingSystemKey.String("servicebus") + // Google Cloud Pub/Sub + // Stability: development + MessagingSystemGCPPubSub = MessagingSystemKey.String("gcp_pubsub") + // Java Message Service + // Stability: development + MessagingSystemJMS = MessagingSystemKey.String("jms") + // Apache Kafka + // Stability: development + MessagingSystemKafka = MessagingSystemKey.String("kafka") + // RabbitMQ + // Stability: development + MessagingSystemRabbitMQ = MessagingSystemKey.String("rabbitmq") + // Apache RocketMQ + // Stability: development + MessagingSystemRocketMQ = MessagingSystemKey.String("rocketmq") + // Apache Pulsar + // Stability: development + MessagingSystemPulsar = MessagingSystemKey.String("pulsar") +) + +// Namespace: network +const ( + // NetworkCarrierICCKey is the attribute Key conforming to the + // "network.carrier.icc" semantic conventions. It represents the ISO 3166-1 + // alpha-2 2-character country code associated with the mobile carrier network. 
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: DE
+ NetworkCarrierICCKey = attribute.Key("network.carrier.icc")
+
+ // NetworkCarrierMCCKey is the attribute Key conforming to the
+ // "network.carrier.mcc" semantic conventions. It represents the mobile carrier
+ // country code.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 310
+ NetworkCarrierMCCKey = attribute.Key("network.carrier.mcc")
+
+ // NetworkCarrierMNCKey is the attribute Key conforming to the
+ // "network.carrier.mnc" semantic conventions. It represents the mobile carrier
+ // network code.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 001
+ NetworkCarrierMNCKey = attribute.Key("network.carrier.mnc")
+
+ // NetworkCarrierNameKey is the attribute Key conforming to the
+ // "network.carrier.name" semantic conventions. It represents the name of the
+ // mobile carrier.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: sprint
+ NetworkCarrierNameKey = attribute.Key("network.carrier.name")
+
+ // NetworkConnectionStateKey is the attribute Key conforming to the
+ // "network.connection.state" semantic conventions. It represents the state of
+ // the network connection.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "close_wait"
+ // Note: Connection states are defined as part of the [rfc9293]
+ //
+ // [rfc9293]: https://datatracker.ietf.org/doc/html/rfc9293#section-3.3.2
+ NetworkConnectionStateKey = attribute.Key("network.connection.state")
+
+ // NetworkConnectionSubtypeKey is the attribute Key conforming to the
+ // "network.connection.subtype" semantic conventions. It describes more details
+ // regarding the connection.type. It may be the type of cell technology
+ // connection, but it could be used for describing details about a wifi
+ // connection.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: LTE
+ NetworkConnectionSubtypeKey = attribute.Key("network.connection.subtype")
+
+ // NetworkConnectionTypeKey is the attribute Key conforming to the
+ // "network.connection.type" semantic conventions. It represents the internet
+ // connection type.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: wifi
+ NetworkConnectionTypeKey = attribute.Key("network.connection.type")
+
+ // NetworkInterfaceNameKey is the attribute Key conforming to the
+ // "network.interface.name" semantic conventions. It represents the network
+ // interface name.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "lo", "eth0"
+ NetworkInterfaceNameKey = attribute.Key("network.interface.name")
+
+ // NetworkIODirectionKey is the attribute Key conforming to the
+ // "network.io.direction" semantic conventions. It represents the network IO
+ // operation direction.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "transmit"
+ NetworkIODirectionKey = attribute.Key("network.io.direction")
+
+ // NetworkLocalAddressKey is the attribute Key conforming to the
+ // "network.local.address" semantic conventions. It represents the local address
+ // of the network connection - IP address or Unix domain socket name.
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "10.1.2.80", "/tmp/my.sock" + NetworkLocalAddressKey = attribute.Key("network.local.address") + + // NetworkLocalPortKey is the attribute Key conforming to the + // "network.local.port" semantic conventions. It represents the local port + // number of the network connection. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 65123 + NetworkLocalPortKey = attribute.Key("network.local.port") + + // NetworkPeerAddressKey is the attribute Key conforming to the + // "network.peer.address" semantic conventions. It represents the peer address + // of the network connection - IP address or Unix domain socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "10.1.2.80", "/tmp/my.sock" + NetworkPeerAddressKey = attribute.Key("network.peer.address") + + // NetworkPeerPortKey is the attribute Key conforming to the "network.peer.port" + // semantic conventions. It represents the peer port number of the network + // connection. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 65123 + NetworkPeerPortKey = attribute.Key("network.peer.port") + + // NetworkProtocolNameKey is the attribute Key conforming to the + // "network.protocol.name" semantic conventions. It represents the + // [OSI application layer] or non-OSI equivalent. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "amqp", "http", "mqtt" + // Note: The value SHOULD be normalized to lowercase. + // + // [OSI application layer]: https://wikipedia.org/wiki/Application_layer + NetworkProtocolNameKey = attribute.Key("network.protocol.name") + + // NetworkProtocolVersionKey is the attribute Key conforming to the + // "network.protocol.version" semantic conventions. It represents the actual + // version of the protocol used for network communication. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "1.1", "2" + // Note: If protocol version is subject to negotiation (for example using [ALPN] + // ), this attribute SHOULD be set to the negotiated version. If the actual + // protocol version is not known, this attribute SHOULD NOT be set. + // + // [ALPN]: https://www.rfc-editor.org/rfc/rfc7301.html + NetworkProtocolVersionKey = attribute.Key("network.protocol.version") + + // NetworkTransportKey is the attribute Key conforming to the + // "network.transport" semantic conventions. It represents the + // [OSI transport layer] or [inter-process communication method]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "tcp", "udp" + // Note: The value SHOULD be normalized to lowercase. + // + // Consider always setting the transport when setting a port number, since + // a port number is ambiguous without knowing the transport. For example + // different processes could be listening on TCP port 12345 and UDP port 12345. + // + // [OSI transport layer]: https://wikipedia.org/wiki/Transport_layer + // [inter-process communication method]: https://wikipedia.org/wiki/Inter-process_communication + NetworkTransportKey = attribute.Key("network.transport") + + // NetworkTypeKey is the attribute Key conforming to the "network.type" semantic + // conventions. It represents the [OSI network layer] or non-OSI equivalent. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "ipv4", "ipv6" + // Note: The value SHOULD be normalized to lowercase. + // + // [OSI network layer]: https://wikipedia.org/wiki/Network_layer + NetworkTypeKey = attribute.Key("network.type") +) + +// NetworkCarrierICC returns an attribute KeyValue conforming to the +// "network.carrier.icc" semantic conventions. It represents the ISO 3166-1 +// alpha-2 2-character country code associated with the mobile carrier network. +func NetworkCarrierICC(val string) attribute.KeyValue { + return NetworkCarrierICCKey.String(val) +} + +// NetworkCarrierMCC returns an attribute KeyValue conforming to the +// "network.carrier.mcc" semantic conventions. It represents the mobile carrier +// country code. +func NetworkCarrierMCC(val string) attribute.KeyValue { + return NetworkCarrierMCCKey.String(val) +} + +// NetworkCarrierMNC returns an attribute KeyValue conforming to the +// "network.carrier.mnc" semantic conventions. It represents the mobile carrier +// network code. +func NetworkCarrierMNC(val string) attribute.KeyValue { + return NetworkCarrierMNCKey.String(val) +} + +// NetworkCarrierName returns an attribute KeyValue conforming to the +// "network.carrier.name" semantic conventions. It represents the name of the +// mobile carrier. +func NetworkCarrierName(val string) attribute.KeyValue { + return NetworkCarrierNameKey.String(val) +} + +// NetworkInterfaceName returns an attribute KeyValue conforming to the +// "network.interface.name" semantic conventions. It represents the network +// interface name. +func NetworkInterfaceName(val string) attribute.KeyValue { + return NetworkInterfaceNameKey.String(val) +} + +// NetworkLocalAddress returns an attribute KeyValue conforming to the +// "network.local.address" semantic conventions. It represents the local address +// of the network connection - IP address or Unix domain socket name. +func NetworkLocalAddress(val string) attribute.KeyValue { + return NetworkLocalAddressKey.String(val) +} + +// NetworkLocalPort returns an attribute KeyValue conforming to the +// "network.local.port" semantic conventions. It represents the local port number +// of the network connection. +func NetworkLocalPort(val int) attribute.KeyValue { + return NetworkLocalPortKey.Int(val) +} + +// NetworkPeerAddress returns an attribute KeyValue conforming to the +// "network.peer.address" semantic conventions. It represents the peer address of +// the network connection - IP address or Unix domain socket name. +func NetworkPeerAddress(val string) attribute.KeyValue { + return NetworkPeerAddressKey.String(val) +} + +// NetworkPeerPort returns an attribute KeyValue conforming to the +// "network.peer.port" semantic conventions. It represents the peer port number +// of the network connection. +func NetworkPeerPort(val int) attribute.KeyValue { + return NetworkPeerPortKey.Int(val) +} + +// NetworkProtocolName returns an attribute KeyValue conforming to the +// "network.protocol.name" semantic conventions. It represents the +// [OSI application layer] or non-OSI equivalent. +// +// [OSI application layer]: https://wikipedia.org/wiki/Application_layer +func NetworkProtocolName(val string) attribute.KeyValue { + return NetworkProtocolNameKey.String(val) +} + +// NetworkProtocolVersion returns an attribute KeyValue conforming to the +// "network.protocol.version" semantic conventions. It represents the actual +// version of the protocol used for network communication. 
+func NetworkProtocolVersion(val string) attribute.KeyValue { + return NetworkProtocolVersionKey.String(val) +} + +// Enum values for network.connection.state +var ( + // closed + // Stability: development + NetworkConnectionStateClosed = NetworkConnectionStateKey.String("closed") + // close_wait + // Stability: development + NetworkConnectionStateCloseWait = NetworkConnectionStateKey.String("close_wait") + // closing + // Stability: development + NetworkConnectionStateClosing = NetworkConnectionStateKey.String("closing") + // established + // Stability: development + NetworkConnectionStateEstablished = NetworkConnectionStateKey.String("established") + // fin_wait_1 + // Stability: development + NetworkConnectionStateFinWait1 = NetworkConnectionStateKey.String("fin_wait_1") + // fin_wait_2 + // Stability: development + NetworkConnectionStateFinWait2 = NetworkConnectionStateKey.String("fin_wait_2") + // last_ack + // Stability: development + NetworkConnectionStateLastAck = NetworkConnectionStateKey.String("last_ack") + // listen + // Stability: development + NetworkConnectionStateListen = NetworkConnectionStateKey.String("listen") + // syn_received + // Stability: development + NetworkConnectionStateSynReceived = NetworkConnectionStateKey.String("syn_received") + // syn_sent + // Stability: development + NetworkConnectionStateSynSent = NetworkConnectionStateKey.String("syn_sent") + // time_wait + // Stability: development + NetworkConnectionStateTimeWait = NetworkConnectionStateKey.String("time_wait") +) + +// Enum values for network.connection.subtype +var ( + // GPRS + // Stability: development + NetworkConnectionSubtypeGprs = NetworkConnectionSubtypeKey.String("gprs") + // EDGE + // Stability: development + NetworkConnectionSubtypeEdge = NetworkConnectionSubtypeKey.String("edge") + // UMTS + // Stability: development + NetworkConnectionSubtypeUmts = NetworkConnectionSubtypeKey.String("umts") + // CDMA + // Stability: development + NetworkConnectionSubtypeCdma = NetworkConnectionSubtypeKey.String("cdma") + // EVDO Rel. 0 + // Stability: development + NetworkConnectionSubtypeEvdo0 = NetworkConnectionSubtypeKey.String("evdo_0") + // EVDO Rev. A + // Stability: development + NetworkConnectionSubtypeEvdoA = NetworkConnectionSubtypeKey.String("evdo_a") + // CDMA2000 1XRTT + // Stability: development + NetworkConnectionSubtypeCdma20001xrtt = NetworkConnectionSubtypeKey.String("cdma2000_1xrtt") + // HSDPA + // Stability: development + NetworkConnectionSubtypeHsdpa = NetworkConnectionSubtypeKey.String("hsdpa") + // HSUPA + // Stability: development + NetworkConnectionSubtypeHsupa = NetworkConnectionSubtypeKey.String("hsupa") + // HSPA + // Stability: development + NetworkConnectionSubtypeHspa = NetworkConnectionSubtypeKey.String("hspa") + // IDEN + // Stability: development + NetworkConnectionSubtypeIden = NetworkConnectionSubtypeKey.String("iden") + // EVDO Rev. 
B
+ // Stability: development
+ NetworkConnectionSubtypeEvdoB = NetworkConnectionSubtypeKey.String("evdo_b")
+ // LTE
+ // Stability: development
+ NetworkConnectionSubtypeLte = NetworkConnectionSubtypeKey.String("lte")
+ // EHRPD
+ // Stability: development
+ NetworkConnectionSubtypeEhrpd = NetworkConnectionSubtypeKey.String("ehrpd")
+ // HSPAP
+ // Stability: development
+ NetworkConnectionSubtypeHspap = NetworkConnectionSubtypeKey.String("hspap")
+ // GSM
+ // Stability: development
+ NetworkConnectionSubtypeGsm = NetworkConnectionSubtypeKey.String("gsm")
+ // TD-SCDMA
+ // Stability: development
+ NetworkConnectionSubtypeTdScdma = NetworkConnectionSubtypeKey.String("td_scdma")
+ // IWLAN
+ // Stability: development
+ NetworkConnectionSubtypeIwlan = NetworkConnectionSubtypeKey.String("iwlan")
+ // 5G NR (New Radio)
+ // Stability: development
+ NetworkConnectionSubtypeNr = NetworkConnectionSubtypeKey.String("nr")
+ // 5G NRNSA (New Radio Non-Standalone)
+ // Stability: development
+ NetworkConnectionSubtypeNrnsa = NetworkConnectionSubtypeKey.String("nrnsa")
+ // LTE CA
+ // Stability: development
+ NetworkConnectionSubtypeLteCa = NetworkConnectionSubtypeKey.String("lte_ca")
+)
+
+// Enum values for network.connection.type
+var (
+ // wifi
+ // Stability: development
+ NetworkConnectionTypeWifi = NetworkConnectionTypeKey.String("wifi")
+ // wired
+ // Stability: development
+ NetworkConnectionTypeWired = NetworkConnectionTypeKey.String("wired")
+ // cell
+ // Stability: development
+ NetworkConnectionTypeCell = NetworkConnectionTypeKey.String("cell")
+ // unavailable
+ // Stability: development
+ NetworkConnectionTypeUnavailable = NetworkConnectionTypeKey.String("unavailable")
+ // unknown
+ // Stability: development
+ NetworkConnectionTypeUnknown = NetworkConnectionTypeKey.String("unknown")
+)
+
+// Enum values for network.io.direction
+var (
+ // transmit
+ // Stability: development
+ NetworkIODirectionTransmit = NetworkIODirectionKey.String("transmit")
+ // receive
+ // Stability: development
+ NetworkIODirectionReceive = NetworkIODirectionKey.String("receive")
+)
+
+// Enum values for network.transport
+var (
+ // TCP
+ // Stability: stable
+ NetworkTransportTCP = NetworkTransportKey.String("tcp")
+ // UDP
+ // Stability: stable
+ NetworkTransportUDP = NetworkTransportKey.String("udp")
+ // Named or anonymous pipe.
+ // Stability: stable
+ NetworkTransportPipe = NetworkTransportKey.String("pipe")
+ // Unix domain socket
+ // Stability: stable
+ NetworkTransportUnix = NetworkTransportKey.String("unix")
+ // QUIC
+ // Stability: stable
+ NetworkTransportQUIC = NetworkTransportKey.String("quic")
+)
+
+// Enum values for network.type
+var (
+ // IPv4
+ // Stability: stable
+ NetworkTypeIPv4 = NetworkTypeKey.String("ipv4")
+ // IPv6
+ // Stability: stable
+ NetworkTypeIPv6 = NetworkTypeKey.String("ipv6")
+)
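+
+// Example (illustrative sketch, not part of the generated file): recording
+// connection-level attributes on an existing span, mixing the typed helpers
+// with the enum values above; "span" and the literal values are assumptions
+// for illustration:
+//
+//	span.SetAttributes(
+//		NetworkTransportTCP,
+//		NetworkTypeIPv4,
+//		NetworkPeerAddress("10.1.2.80"),
+//		NetworkPeerPort(65123),
+//		NetworkProtocolName("http"),
+//		NetworkProtocolVersion("1.1"),
+//	)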
+
+// Namespace: oci
+const (
+ // OCIManifestDigestKey is the attribute Key conforming to the
+ // "oci.manifest.digest" semantic conventions. It represents the digest of the
+ // OCI image manifest. For container images specifically, this is the digest by
+ // which the container image is known.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // "sha256:e4ca62c0d62f3e886e684806dfe9d4e0cda60d54986898173c1083856cfda0f4"
+ // Note: Follows [OCI Image Manifest Specification], and specifically the
+ // [Digest property].
+ // An example can be found in [Example Image Manifest].
+ //
+ // [OCI Image Manifest Specification]: https://github.com/opencontainers/image-spec/blob/main/manifest.md
+ // [Digest property]: https://github.com/opencontainers/image-spec/blob/main/descriptor.md#digests
+ // [Example Image Manifest]: https://github.com/opencontainers/image-spec/blob/main/manifest.md#example-image-manifest
+ OCIManifestDigestKey = attribute.Key("oci.manifest.digest")
+)
+
+// OCIManifestDigest returns an attribute KeyValue conforming to the
+// "oci.manifest.digest" semantic conventions. It represents the digest of the
+// OCI image manifest. For container images specifically, this is the digest by
+// which the container image is known.
+func OCIManifestDigest(val string) attribute.KeyValue {
+ return OCIManifestDigestKey.String(val)
+}
+
+// Namespace: opentracing
+const (
+ // OpenTracingRefTypeKey is the attribute Key conforming to the
+ // "opentracing.ref_type" semantic conventions. It represents the parent-child
+ // Reference type.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: The causal relationship between a child Span and a parent Span.
+ OpenTracingRefTypeKey = attribute.Key("opentracing.ref_type")
+)
+
+// Enum values for opentracing.ref_type
+var (
+ // The parent Span depends on the child Span in some capacity
+ // Stability: development
+ OpenTracingRefTypeChildOf = OpenTracingRefTypeKey.String("child_of")
+ // The parent Span doesn't depend in any way on the result of the child Span
+ // Stability: development
+ OpenTracingRefTypeFollowsFrom = OpenTracingRefTypeKey.String("follows_from")
+)
+
+// Namespace: os
+const (
+ // OSBuildIDKey is the attribute Key conforming to the "os.build_id" semantic
+ // conventions. It represents the unique identifier for a particular build or
+ // compilation of the operating system.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "TQ3C.230805.001.B2", "20E247", "22621"
+ OSBuildIDKey = attribute.Key("os.build_id")
+
+ // OSDescriptionKey is the attribute Key conforming to the "os.description"
+ // semantic conventions. It represents the human readable (not intended to be
+ // parsed) OS version information, like e.g. reported by `ver` or
+ // `lsb_release -a` commands.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "Microsoft Windows [Version 10.0.18363.778]", "Ubuntu 18.04.1 LTS"
+ OSDescriptionKey = attribute.Key("os.description")
+
+ // OSNameKey is the attribute Key conforming to the "os.name" semantic
+ // conventions. It represents the human readable operating system name.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "iOS", "Android", "Ubuntu"
+ OSNameKey = attribute.Key("os.name")
+
+ // OSTypeKey is the attribute Key conforming to the "os.type" semantic
+ // conventions. It represents the operating system type.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ OSTypeKey = attribute.Key("os.type")
+
+ // OSVersionKey is the attribute Key conforming to the "os.version" semantic
+ // conventions. It represents the version string of the operating system as
+ // defined in [Version Attributes].
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "14.2.1", "18.04.1"
+ //
+ // [Version Attributes]: /docs/resource/README.md#version-attributes
+ OSVersionKey = attribute.Key("os.version")
+)
+
+// OSBuildID returns an attribute KeyValue conforming to the "os.build_id"
+// semantic conventions. It represents the unique identifier for a particular
+// build or compilation of the operating system.
+func OSBuildID(val string) attribute.KeyValue {
+ return OSBuildIDKey.String(val)
+}
+
+// OSDescription returns an attribute KeyValue conforming to the "os.description"
+// semantic conventions. It represents the human readable (not intended to be
+// parsed) OS version information, like e.g. reported by `ver` or
+// `lsb_release -a` commands.
+func OSDescription(val string) attribute.KeyValue {
+ return OSDescriptionKey.String(val)
+}
+
+// OSName returns an attribute KeyValue conforming to the "os.name" semantic
+// conventions. It represents the human readable operating system name.
+func OSName(val string) attribute.KeyValue {
+ return OSNameKey.String(val)
+}
+
+// OSVersion returns an attribute KeyValue conforming to the "os.version"
+// semantic conventions. It represents the version string of the operating system
+// as defined in [Version Attributes].
+//
+// [Version Attributes]: /docs/resource/README.md#version-attributes
+func OSVersion(val string) attribute.KeyValue {
+ return OSVersionKey.String(val)
+}
+
+// Enum values for os.type
+var (
+ // Microsoft Windows
+ // Stability: development
+ OSTypeWindows = OSTypeKey.String("windows")
+ // Linux
+ // Stability: development
+ OSTypeLinux = OSTypeKey.String("linux")
+ // Apple Darwin
+ // Stability: development
+ OSTypeDarwin = OSTypeKey.String("darwin")
+ // FreeBSD
+ // Stability: development
+ OSTypeFreeBSD = OSTypeKey.String("freebsd")
+ // NetBSD
+ // Stability: development
+ OSTypeNetBSD = OSTypeKey.String("netbsd")
+ // OpenBSD
+ // Stability: development
+ OSTypeOpenBSD = OSTypeKey.String("openbsd")
+ // DragonFly BSD
+ // Stability: development
+ OSTypeDragonflyBSD = OSTypeKey.String("dragonflybsd")
+ // HP-UX (Hewlett Packard Unix)
+ // Stability: development
+ OSTypeHPUX = OSTypeKey.String("hpux")
+ // AIX (Advanced Interactive eXecutive)
+ // Stability: development
+ OSTypeAIX = OSTypeKey.String("aix")
+ // SunOS, Oracle Solaris
+ // Stability: development
+ OSTypeSolaris = OSTypeKey.String("solaris")
+ // IBM z/OS
+ // Stability: development
+ OSTypeZOS = OSTypeKey.String("z_os")
+)
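+
+// Example (illustrative sketch, not part of the generated file): describing
+// the local operating system with the attributes above as a schemaless
+// resource; "resource" is go.opentelemetry.io/otel/sdk/resource and the
+// values are placeholders:
+//
+//	res := resource.NewSchemaless(
+//		OSTypeLinux,
+//		OSName("Ubuntu"),
+//		OSVersion("18.04.1"),
+//	)
+//	_ = res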
+
+// Namespace: otel
+const (
+ // OTelComponentNameKey is the attribute Key conforming to the
+ // "otel.component.name" semantic conventions. It represents a name uniquely
+ // identifying the instance of the OpenTelemetry component within its containing
+ // SDK instance.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "otlp_grpc_span_exporter/0", "custom-name"
+ // Note: Implementations SHOULD ensure a low cardinality for this attribute,
+ // even across application or SDK restarts.
+ // E.g. implementations MUST NOT use UUIDs as values for this attribute.
+ //
+ // Implementations MAY achieve these goals by following a
+ // `<otel.component.type>/<instance-counter>` pattern, e.g.
+ // `batching_span_processor/0`.
+ // Hereby `otel.component.type` refers to the corresponding attribute value of
+ // the component.
+ //
+ // The value of `instance-counter` MAY be automatically assigned by the
+ // component and uniqueness within the enclosing SDK instance MUST be
+ // guaranteed.
+ // For example, `<instance-counter>` MAY be implemented by using a monotonically
+ // increasing counter (starting with `0`), which is incremented every time an
+ // instance of the given component type is started.
+ //
+ // With this implementation, for example the first Batching Span Processor would
+ // have `batching_span_processor/0`
+ // as `otel.component.name`, the second one `batching_span_processor/1` and so
+ // on.
+ // These values will therefore be reused in the case of an application restart.
+ OTelComponentNameKey = attribute.Key("otel.component.name")
+
+ // OTelComponentTypeKey is the attribute Key conforming to the
+ // "otel.component.type" semantic conventions. It represents a name identifying
+ // the type of the OpenTelemetry component.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "batching_span_processor", "com.example.MySpanExporter"
+ // Note: If none of the standardized values apply, implementations SHOULD use
+ // the language-defined name of the type.
+ // E.g. for Java the fully qualified classname SHOULD be used in this case.
+ OTelComponentTypeKey = attribute.Key("otel.component.type")
+
+ // OTelScopeNameKey is the attribute Key conforming to the "otel.scope.name"
+ // semantic conventions. It represents the name of the instrumentation scope - (
+ // `InstrumentationScope.Name` in OTLP).
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples: "io.opentelemetry.contrib.mongodb"
+ OTelScopeNameKey = attribute.Key("otel.scope.name")
+
+ // OTelScopeVersionKey is the attribute Key conforming to the
+ // "otel.scope.version" semantic conventions. It represents the version of the
+ // instrumentation scope - (`InstrumentationScope.Version` in OTLP).
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples: "1.0.0"
+ OTelScopeVersionKey = attribute.Key("otel.scope.version")
+
+ // OTelSpanSamplingResultKey is the attribute Key conforming to the
+ // "otel.span.sampling_result" semantic conventions. It represents the result
+ // value of the sampler for this span.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ OTelSpanSamplingResultKey = attribute.Key("otel.span.sampling_result")
+
+ // OTelStatusCodeKey is the attribute Key conforming to the "otel.status_code"
+ // semantic conventions. It represents the name of the code, either "OK" or
+ // "ERROR". MUST NOT be set if the status code is UNSET.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples:
+ OTelStatusCodeKey = attribute.Key("otel.status_code")
+
+ // OTelStatusDescriptionKey is the attribute Key conforming to the
+ // "otel.status_description" semantic conventions. It represents the description
+ // of the Status if it has a value, otherwise not set.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Stable
+ //
+ // Examples: "resource not found"
+ OTelStatusDescriptionKey = attribute.Key("otel.status_description")
+)
+
+// OTelComponentName returns an attribute KeyValue conforming to the
+// "otel.component.name" semantic conventions. It represents a name uniquely
+// identifying the instance of the OpenTelemetry component within its containing
+// SDK instance.
+func OTelComponentName(val string) attribute.KeyValue { + return OTelComponentNameKey.String(val) +} + +// OTelScopeName returns an attribute KeyValue conforming to the +// "otel.scope.name" semantic conventions. It represents the name of the +// instrumentation scope - (`InstrumentationScope.Name` in OTLP). +func OTelScopeName(val string) attribute.KeyValue { + return OTelScopeNameKey.String(val) +} + +// OTelScopeVersion returns an attribute KeyValue conforming to the +// "otel.scope.version" semantic conventions. It represents the version of the +// instrumentation scope - (`InstrumentationScope.Version` in OTLP). +func OTelScopeVersion(val string) attribute.KeyValue { + return OTelScopeVersionKey.String(val) +} + +// OTelStatusDescription returns an attribute KeyValue conforming to the +// "otel.status_description" semantic conventions. It represents the description +// of the Status if it has a value, otherwise not set. +func OTelStatusDescription(val string) attribute.KeyValue { + return OTelStatusDescriptionKey.String(val) +} + +// Enum values for otel.component.type +var ( + // The builtin SDK batching span processor + // + // Stability: development + OTelComponentTypeBatchingSpanProcessor = OTelComponentTypeKey.String("batching_span_processor") + // The builtin SDK simple span processor + // + // Stability: development + OTelComponentTypeSimpleSpanProcessor = OTelComponentTypeKey.String("simple_span_processor") + // The builtin SDK batching log record processor + // + // Stability: development + OTelComponentTypeBatchingLogProcessor = OTelComponentTypeKey.String("batching_log_processor") + // The builtin SDK simple log record processor + // + // Stability: development + OTelComponentTypeSimpleLogProcessor = OTelComponentTypeKey.String("simple_log_processor") + // OTLP span exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCSpanExporter = OTelComponentTypeKey.String("otlp_grpc_span_exporter") + // OTLP span exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPSpanExporter = OTelComponentTypeKey.String("otlp_http_span_exporter") + // OTLP span exporter over HTTP with JSON serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPJSONSpanExporter = OTelComponentTypeKey.String("otlp_http_json_span_exporter") + // OTLP log record exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCLogExporter = OTelComponentTypeKey.String("otlp_grpc_log_exporter") + // OTLP log record exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPLogExporter = OTelComponentTypeKey.String("otlp_http_log_exporter") + // OTLP log record exporter over HTTP with JSON serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPJSONLogExporter = OTelComponentTypeKey.String("otlp_http_json_log_exporter") + // The builtin SDK periodically exporting metric reader + // + // Stability: development + OTelComponentTypePeriodicMetricReader = OTelComponentTypeKey.String("periodic_metric_reader") + // OTLP metric exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCMetricExporter = OTelComponentTypeKey.String("otlp_grpc_metric_exporter") + // OTLP metric exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPMetricExporter = 
OTelComponentTypeKey.String("otlp_http_metric_exporter")
+ // OTLP metric exporter over HTTP with JSON serialization
+ //
+ // Stability: development
+ OTelComponentTypeOtlpHTTPJSONMetricExporter = OTelComponentTypeKey.String("otlp_http_json_metric_exporter")
+)
+
+// Enum values for otel.span.sampling_result
+var (
+ // The span is not sampled and not recording
+ // Stability: development
+ OTelSpanSamplingResultDrop = OTelSpanSamplingResultKey.String("DROP")
+ // The span is not sampled, but recording
+ // Stability: development
+ OTelSpanSamplingResultRecordOnly = OTelSpanSamplingResultKey.String("RECORD_ONLY")
+ // The span is sampled and recording
+ // Stability: development
+ OTelSpanSamplingResultRecordAndSample = OTelSpanSamplingResultKey.String("RECORD_AND_SAMPLE")
+)
+
+// Enum values for otel.status_code
+var (
+ // The operation has been validated by an Application developer or Operator to
+ // have completed successfully.
+ // Stability: stable
+ OTelStatusCodeOk = OTelStatusCodeKey.String("OK")
+ // The operation contains an error.
+ // Stability: stable
+ OTelStatusCodeError = OTelStatusCodeKey.String("ERROR")
+)
+
+// Namespace: peer
+const (
+ // PeerServiceKey is the attribute Key conforming to the "peer.service" semantic
+ // conventions. It represents the [`service.name`] of the remote service. SHOULD
+ // be equal to the actual `service.name` resource attribute of the remote
+ // service if any.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: AuthTokenCache
+ //
+ // [`service.name`]: /docs/resource/README.md#service
+ PeerServiceKey = attribute.Key("peer.service")
+)
+
+// PeerService returns an attribute KeyValue conforming to the "peer.service"
+// semantic conventions. It represents the [`service.name`] of the remote
+// service. SHOULD be equal to the actual `service.name` resource attribute of
+// the remote service if any.
+//
+// [`service.name`]: /docs/resource/README.md#service
+func PeerService(val string) attribute.KeyValue {
+ return PeerServiceKey.String(val)
+}
+
+// Namespace: process
+const (
+ // ProcessArgsCountKey is the attribute Key conforming to the
+ // "process.args_count" semantic conventions. It represents the length of the
+ // process.command_args array.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 4
+ // Note: This field can be useful for querying or performing bucket analysis on
+ // how many arguments were provided to start a process. More arguments may be an
+ // indication of suspicious activity.
+ ProcessArgsCountKey = attribute.Key("process.args_count")
+
+ // ProcessCommandKey is the attribute Key conforming to the "process.command"
+ // semantic conventions. It represents the command used to launch the process
+ // (i.e. the command name). On Linux based systems, can be set to the zeroth
+ // string in `proc/[pid]/cmdline`. On Windows, can be set to the first parameter
+ // extracted from `GetCommandLineW`.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "cmd/otelcol"
+ ProcessCommandKey = attribute.Key("process.command")
+
+ // ProcessCommandArgsKey is the attribute Key conforming to the
+ // "process.command_args" semantic conventions. It represents all the
+ // command arguments (including the command/executable itself) as received by
+ // the process.
On Linux-based systems (and some other Unixoid systems
+ // supporting procfs), can be set according to the list of null-delimited
+ // strings extracted from `proc/[pid]/cmdline`. For libc-based executables, this
+ // would be the full argv vector passed to `main`. SHOULD NOT be collected by
+ // default unless there is sanitization that excludes sensitive data.
+ //
+ // Type: string[]
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "cmd/otecol", "--config=config.yaml"
+ ProcessCommandArgsKey = attribute.Key("process.command_args")
+
+ // ProcessCommandLineKey is the attribute Key conforming to the
+ // "process.command_line" semantic conventions. It represents the full command
+ // used to launch the process as a single string representing the full command.
+ // On Windows, can be set to the result of `GetCommandLineW`. Do not set this if
+ // you have to assemble it just for monitoring; use `process.command_args`
+ // instead. SHOULD NOT be collected by default unless there is sanitization that
+ // excludes sensitive data.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "C:\cmd\otecol --config="my directory\config.yaml""
+ ProcessCommandLineKey = attribute.Key("process.command_line")
+
+ // ProcessContextSwitchTypeKey is the attribute Key conforming to the
+ // "process.context_switch_type" semantic conventions. It specifies whether the
+ // context switches for this data point were voluntary or involuntary.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ ProcessContextSwitchTypeKey = attribute.Key("process.context_switch_type")
+
+ // ProcessCreationTimeKey is the attribute Key conforming to the
+ // "process.creation.time" semantic conventions. It represents the date and time
+ // the process was created, in ISO 8601 format.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "2023-11-21T09:25:34.853Z"
+ ProcessCreationTimeKey = attribute.Key("process.creation.time")
+
+ // ProcessExecutableBuildIDGNUKey is the attribute Key conforming to the
+ // "process.executable.build_id.gnu" semantic conventions. It represents the GNU
+ // build ID as found in the `.note.gnu.build-id` ELF section (hex string).
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "c89b11207f6479603b0d49bf291c092c2b719293"
+ ProcessExecutableBuildIDGNUKey = attribute.Key("process.executable.build_id.gnu")
+
+ // ProcessExecutableBuildIDGoKey is the attribute Key conforming to the
+ // "process.executable.build_id.go" semantic conventions. It represents the Go
+ // build ID as retrieved by `go tool buildid <go executable>`.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // "foh3mEXu7BLZjsN9pOwG/kATcXlYVCDEFouRMQed_/WwRFB1hPo9LBkekthSPG/x8hMC8emW2cCjXD0_1aY"
+ ProcessExecutableBuildIDGoKey = attribute.Key("process.executable.build_id.go")
+
+ // ProcessExecutableBuildIDHtlhashKey is the attribute Key conforming to the
+ // "process.executable.build_id.htlhash" semantic conventions. It represents the
+ // profiling specific build ID for executables. See the OTel specification for
+ // Profiles for more information.
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "600DCAFE4A110000F2BF38C493F5FB92" + ProcessExecutableBuildIDHtlhashKey = attribute.Key("process.executable.build_id.htlhash") + + // ProcessExecutableNameKey is the attribute Key conforming to the + // "process.executable.name" semantic conventions. It represents the name of the + // process executable. On Linux based systems, this SHOULD be set to the base + // name of the target of `/proc/[pid]/exe`. On Windows, this SHOULD be set to + // the base name of `GetProcessImageFileNameW`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otelcol" + ProcessExecutableNameKey = attribute.Key("process.executable.name") + + // ProcessExecutablePathKey is the attribute Key conforming to the + // "process.executable.path" semantic conventions. It represents the full path + // to the process executable. On Linux based systems, can be set to the target + // of `proc/[pid]/exe`. On Windows, can be set to the result of + // `GetProcessImageFileNameW`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/usr/bin/cmd/otelcol" + ProcessExecutablePathKey = attribute.Key("process.executable.path") + + // ProcessExitCodeKey is the attribute Key conforming to the "process.exit.code" + // semantic conventions. It represents the exit code of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 127 + ProcessExitCodeKey = attribute.Key("process.exit.code") + + // ProcessExitTimeKey is the attribute Key conforming to the "process.exit.time" + // semantic conventions. It represents the date and time the process exited, in + // ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2023-11-21T09:26:12.315Z" + ProcessExitTimeKey = attribute.Key("process.exit.time") + + // ProcessGroupLeaderPIDKey is the attribute Key conforming to the + // "process.group_leader.pid" semantic conventions. It represents the PID of the + // process's group leader. This is also the process group ID (PGID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 23 + ProcessGroupLeaderPIDKey = attribute.Key("process.group_leader.pid") + + // ProcessInteractiveKey is the attribute Key conforming to the + // "process.interactive" semantic conventions. It represents whether the + // process is connected to an interactive shell. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + ProcessInteractiveKey = attribute.Key("process.interactive") + + // ProcessLinuxCgroupKey is the attribute Key conforming to the + // "process.linux.cgroup" semantic conventions. It represents the control group + // associated with the process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1:name=systemd:/user.slice/user-1000.slice/session-3.scope", + // "0::/user.slice/user-1000.slice/user@1000.service/tmux-spawn-0267755b-4639-4a27-90ed-f19f88e53748.scope" + // Note: Control groups (cgroups) are a kernel feature used to organize and + // manage process resources. This attribute provides the path(s) to the + // cgroup(s) associated with the process, which should match the contents of the + // [/proc/[PID]/cgroup] file.
+ // + // [/proc/[PID]/cgroup]: https://man7.org/linux/man-pages/man7/cgroups.7.html + ProcessLinuxCgroupKey = attribute.Key("process.linux.cgroup") + + // ProcessOwnerKey is the attribute Key conforming to the "process.owner" + // semantic conventions. It represents the username of the user that owns the + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + ProcessOwnerKey = attribute.Key("process.owner") + + // ProcessPagingFaultTypeKey is the attribute Key conforming to the + // "process.paging.fault_type" semantic conventions. It represents the type of + // page fault for this data point. Type `major` is for major/hard page faults, + // and `minor` is for minor/soft page faults. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + ProcessPagingFaultTypeKey = attribute.Key("process.paging.fault_type") + + // ProcessParentPIDKey is the attribute Key conforming to the + // "process.parent_pid" semantic conventions. It represents the parent Process + // identifier (PPID). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 111 + ProcessParentPIDKey = attribute.Key("process.parent_pid") + + // ProcessPIDKey is the attribute Key conforming to the "process.pid" semantic + // conventions. It represents the process identifier (PID). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1234 + ProcessPIDKey = attribute.Key("process.pid") + + // ProcessRealUserIDKey is the attribute Key conforming to the + // "process.real_user.id" semantic conventions. It represents the real user ID + // (RUID) of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1000 + ProcessRealUserIDKey = attribute.Key("process.real_user.id") + + // ProcessRealUserNameKey is the attribute Key conforming to the + // "process.real_user.name" semantic conventions. It represents the username of + // the real user of the process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "operator" + ProcessRealUserNameKey = attribute.Key("process.real_user.name") + + // ProcessRuntimeDescriptionKey is the attribute Key conforming to the + // "process.runtime.description" semantic conventions. It represents an + // additional description about the runtime of the process, for example a + // specific vendor customization of the runtime environment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: Eclipse OpenJ9 Eclipse OpenJ9 VM openj9-0.21.0 + ProcessRuntimeDescriptionKey = attribute.Key("process.runtime.description") + + // ProcessRuntimeNameKey is the attribute Key conforming to the + // "process.runtime.name" semantic conventions. It represents the name of the + // runtime of this process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "OpenJDK Runtime Environment" + ProcessRuntimeNameKey = attribute.Key("process.runtime.name") + + // ProcessRuntimeVersionKey is the attribute Key conforming to the + // "process.runtime.version" semantic conventions. It represents the version of + // the runtime of this process, as returned by the runtime without modification. 
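+ //
+ // As a hedged usage sketch (not part of the upstream description): assuming
+ // this package is imported as `semconv` alongside the OTel SDK package
+ // go.opentelemetry.io/otel/sdk/resource, and `ctx` is a context.Context,
+ // the runtime attributes could be attached to a resource roughly like so:
+ //
+ //	res, err := resource.New(ctx, resource.WithAttributes(
+ //		semconv.ProcessRuntimeName("go"),
+ //		semconv.ProcessRuntimeVersion(runtime.Version()),
+ //	))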
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 14.0.2 + ProcessRuntimeVersionKey = attribute.Key("process.runtime.version") + + // ProcessSavedUserIDKey is the attribute Key conforming to the + // "process.saved_user.id" semantic conventions. It represents the saved user ID + // (SUID) of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1002 + ProcessSavedUserIDKey = attribute.Key("process.saved_user.id") + + // ProcessSavedUserNameKey is the attribute Key conforming to the + // "process.saved_user.name" semantic conventions. It represents the username of + // the saved user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "operator" + ProcessSavedUserNameKey = attribute.Key("process.saved_user.name") + + // ProcessSessionLeaderPIDKey is the attribute Key conforming to the + // "process.session_leader.pid" semantic conventions. It represents the PID of + // the process's session leader. This is also the session ID (SID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 14 + ProcessSessionLeaderPIDKey = attribute.Key("process.session_leader.pid") + + // ProcessTitleKey is the attribute Key conforming to the "process.title" + // semantic conventions. It represents the process title (proctitle). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cat /etc/hostname", "xfce4-session", "bash" + // Note: In many Unix-like systems, the process title (proctitle) is the string + // that represents the name or command line of a running process, displayed by + // system monitoring tools like ps, top, and htop. + ProcessTitleKey = attribute.Key("process.title") + + // ProcessUserIDKey is the attribute Key conforming to the "process.user.id" + // semantic conventions. It represents the effective user ID (EUID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1001 + ProcessUserIDKey = attribute.Key("process.user.id") + + // ProcessUserNameKey is the attribute Key conforming to the "process.user.name" + // semantic conventions. It represents the username of the effective user of the + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + ProcessUserNameKey = attribute.Key("process.user.name") + + // ProcessVpidKey is the attribute Key conforming to the "process.vpid" semantic + // conventions. It represents the virtual process identifier. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12 + // Note: The process ID within a PID namespace. This is not necessarily unique + // across all processes on the host but it is unique within the process + // namespace that the process exists within. + ProcessVpidKey = attribute.Key("process.vpid") + + // ProcessWorkingDirectoryKey is the attribute Key conforming to the + // "process.working_directory" semantic conventions. It represents the working + // directory of the process.
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/root" + ProcessWorkingDirectoryKey = attribute.Key("process.working_directory") +) + +// ProcessArgsCount returns an attribute KeyValue conforming to the +// "process.args_count" semantic conventions. It represents the length of the +// process.command_args array. +func ProcessArgsCount(val int) attribute.KeyValue { + return ProcessArgsCountKey.Int(val) +} + +// ProcessCommand returns an attribute KeyValue conforming to the +// "process.command" semantic conventions. It represents the command used to +// launch the process (i.e. the command name). On Linux based systems, can be set +// to the zeroth string in `proc/[pid]/cmdline`. On Windows, can be set to the +// first parameter extracted from `GetCommandLineW`. +func ProcessCommand(val string) attribute.KeyValue { + return ProcessCommandKey.String(val) +} + +// ProcessCommandArgs returns an attribute KeyValue conforming to the +// "process.command_args" semantic conventions. It represents all the command +// arguments (including the command/executable itself) as received by the +// process. On Linux-based systems (and some other Unixoid systems supporting +// procfs), can be set according to the list of null-delimited strings extracted +// from `proc/[pid]/cmdline`. For libc-based executables, this would be the full +// argv vector passed to `main`. SHOULD NOT be collected by default unless there +// is sanitization that excludes sensitive data. +func ProcessCommandArgs(val ...string) attribute.KeyValue { + return ProcessCommandArgsKey.StringSlice(val) +} + +// ProcessCommandLine returns an attribute KeyValue conforming to the +// "process.command_line" semantic conventions. It represents the full command +// used to launch the process as a single string representing the full command. +// On Windows, can be set to the result of `GetCommandLineW`. Do not set this if +// you have to assemble it just for monitoring; use `process.command_args` +// instead. SHOULD NOT be collected by default unless there is sanitization that +// excludes sensitive data. +func ProcessCommandLine(val string) attribute.KeyValue { + return ProcessCommandLineKey.String(val) +} + +// ProcessCreationTime returns an attribute KeyValue conforming to the +// "process.creation.time" semantic conventions. It represents the date and time +// the process was created, in ISO 8601 format. +func ProcessCreationTime(val string) attribute.KeyValue { + return ProcessCreationTimeKey.String(val) +} + +// ProcessExecutableBuildIDGNU returns an attribute KeyValue conforming to the +// "process.executable.build_id.gnu" semantic conventions. It represents the GNU +// build ID as found in the `.note.gnu.build-id` ELF section (hex string). +func ProcessExecutableBuildIDGNU(val string) attribute.KeyValue { + return ProcessExecutableBuildIDGNUKey.String(val) +} + +// ProcessExecutableBuildIDGo returns an attribute KeyValue conforming to the +// "process.executable.build_id.go" semantic conventions. It represents the Go +// build ID as retrieved by `go tool buildid <go executable>`. +func ProcessExecutableBuildIDGo(val string) attribute.KeyValue { + return ProcessExecutableBuildIDGoKey.String(val) +} + +// ProcessExecutableBuildIDHtlhash returns an attribute KeyValue conforming to +// the "process.executable.build_id.htlhash" semantic conventions. It represents +// the profiling specific build ID for executables. See the OTel specification +// for Profiles for more information.
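+//
+// A hedged usage sketch (illustrative, not upstream documentation): the
+// build-ID helpers all return plain attribute.KeyValue values, so they can be
+// combined freely when describing an executable; the values below are taken
+// from the documented examples:
+//
+//	attrs := []attribute.KeyValue{
+//		semconv.ProcessExecutableBuildIDGNU("c89b11207f6479603b0d49bf291c092c2b719293"),
+//		semconv.ProcessExecutableBuildIDHtlhash("600DCAFE4A110000F2BF38C493F5FB92"),
+//	}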
+func ProcessExecutableBuildIDHtlhash(val string) attribute.KeyValue { + return ProcessExecutableBuildIDHtlhashKey.String(val) +} + +// ProcessExecutableName returns an attribute KeyValue conforming to the +// "process.executable.name" semantic conventions. It represents the name of the +// process executable. On Linux based systems, this SHOULD be set to the base +// name of the target of `/proc/[pid]/exe`. On Windows, this SHOULD be set to the +// base name of `GetProcessImageFileNameW`. +func ProcessExecutableName(val string) attribute.KeyValue { + return ProcessExecutableNameKey.String(val) +} + +// ProcessExecutablePath returns an attribute KeyValue conforming to the +// "process.executable.path" semantic conventions. It represents the full path to +// the process executable. On Linux based systems, can be set to the target of +// `proc/[pid]/exe`. On Windows, can be set to the result of +// `GetProcessImageFileNameW`. +func ProcessExecutablePath(val string) attribute.KeyValue { + return ProcessExecutablePathKey.String(val) +} + +// ProcessExitCode returns an attribute KeyValue conforming to the +// "process.exit.code" semantic conventions. It represents the exit code of the +// process. +func ProcessExitCode(val int) attribute.KeyValue { + return ProcessExitCodeKey.Int(val) +} + +// ProcessExitTime returns an attribute KeyValue conforming to the +// "process.exit.time" semantic conventions. It represents the date and time the +// process exited, in ISO 8601 format. +func ProcessExitTime(val string) attribute.KeyValue { + return ProcessExitTimeKey.String(val) +} + +// ProcessGroupLeaderPID returns an attribute KeyValue conforming to the +// "process.group_leader.pid" semantic conventions. It represents the PID of the +// process's group leader. This is also the process group ID (PGID) of the +// process. +func ProcessGroupLeaderPID(val int) attribute.KeyValue { + return ProcessGroupLeaderPIDKey.Int(val) +} + +// ProcessInteractive returns an attribute KeyValue conforming to the +// "process.interactive" semantic conventions. It represents whether the +// process is connected to an interactive shell. +func ProcessInteractive(val bool) attribute.KeyValue { + return ProcessInteractiveKey.Bool(val) +} + +// ProcessLinuxCgroup returns an attribute KeyValue conforming to the +// "process.linux.cgroup" semantic conventions. It represents the control group +// associated with the process. +func ProcessLinuxCgroup(val string) attribute.KeyValue { + return ProcessLinuxCgroupKey.String(val) +} + +// ProcessOwner returns an attribute KeyValue conforming to the "process.owner" +// semantic conventions. It represents the username of the user that owns the +// process. +func ProcessOwner(val string) attribute.KeyValue { + return ProcessOwnerKey.String(val) +} + +// ProcessParentPID returns an attribute KeyValue conforming to the +// "process.parent_pid" semantic conventions. It represents the parent Process +// identifier (PPID). +func ProcessParentPID(val int) attribute.KeyValue { + return ProcessParentPIDKey.Int(val) +} + +// ProcessPID returns an attribute KeyValue conforming to the "process.pid" +// semantic conventions. It represents the process identifier (PID). +func ProcessPID(val int) attribute.KeyValue { + return ProcessPIDKey.Int(val) +} + +// ProcessRealUserID returns an attribute KeyValue conforming to the +// "process.real_user.id" semantic conventions. It represents the real user ID +// (RUID) of the process.
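+//
+// As a usage sketch only (assuming this package is imported as `semconv`,
+// the standard library `os` package is available, and `span` is a recording
+// trace.Span): identity-related process attributes are often recorded
+// together, e.g.:
+//
+//	span.SetAttributes(
+//		semconv.ProcessPID(os.Getpid()),
+//		semconv.ProcessRealUserID(os.Getuid()),
+//	)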
+func ProcessRealUserID(val int) attribute.KeyValue { + return ProcessRealUserIDKey.Int(val) +} + +// ProcessRealUserName returns an attribute KeyValue conforming to the +// "process.real_user.name" semantic conventions. It represents the username of +// the real user of the process. +func ProcessRealUserName(val string) attribute.KeyValue { + return ProcessRealUserNameKey.String(val) +} + +// ProcessRuntimeDescription returns an attribute KeyValue conforming to the +// "process.runtime.description" semantic conventions. It represents an +// additional description about the runtime of the process, for example a +// specific vendor customization of the runtime environment. +func ProcessRuntimeDescription(val string) attribute.KeyValue { + return ProcessRuntimeDescriptionKey.String(val) +} + +// ProcessRuntimeName returns an attribute KeyValue conforming to the +// "process.runtime.name" semantic conventions. It represents the name of the +// runtime of this process. +func ProcessRuntimeName(val string) attribute.KeyValue { + return ProcessRuntimeNameKey.String(val) +} + +// ProcessRuntimeVersion returns an attribute KeyValue conforming to the +// "process.runtime.version" semantic conventions. It represents the version of +// the runtime of this process, as returned by the runtime without modification. +func ProcessRuntimeVersion(val string) attribute.KeyValue { + return ProcessRuntimeVersionKey.String(val) +} + +// ProcessSavedUserID returns an attribute KeyValue conforming to the +// "process.saved_user.id" semantic conventions. It represents the saved user ID +// (SUID) of the process. +func ProcessSavedUserID(val int) attribute.KeyValue { + return ProcessSavedUserIDKey.Int(val) +} + +// ProcessSavedUserName returns an attribute KeyValue conforming to the +// "process.saved_user.name" semantic conventions. It represents the username of +// the saved user. +func ProcessSavedUserName(val string) attribute.KeyValue { + return ProcessSavedUserNameKey.String(val) +} + +// ProcessSessionLeaderPID returns an attribute KeyValue conforming to the +// "process.session_leader.pid" semantic conventions. It represents the PID of +// the process's session leader. This is also the session ID (SID) of the +// process. +func ProcessSessionLeaderPID(val int) attribute.KeyValue { + return ProcessSessionLeaderPIDKey.Int(val) +} + +// ProcessTitle returns an attribute KeyValue conforming to the "process.title" +// semantic conventions. It represents the process title (proctitle). +func ProcessTitle(val string) attribute.KeyValue { + return ProcessTitleKey.String(val) +} + +// ProcessUserID returns an attribute KeyValue conforming to the +// "process.user.id" semantic conventions. It represents the effective user ID +// (EUID) of the process. +func ProcessUserID(val int) attribute.KeyValue { + return ProcessUserIDKey.Int(val) +} + +// ProcessUserName returns an attribute KeyValue conforming to the +// "process.user.name" semantic conventions. It represents the username of the +// effective user of the process. +func ProcessUserName(val string) attribute.KeyValue { + return ProcessUserNameKey.String(val) +} + +// ProcessVpid returns an attribute KeyValue conforming to the "process.vpid" +// semantic conventions. It represents the virtual process identifier. +func ProcessVpid(val int) attribute.KeyValue { + return ProcessVpidKey.Int(val) +} + +// ProcessWorkingDirectory returns an attribute KeyValue conforming to the +// "process.working_directory" semantic conventions. 
It represents the working +// directory of the process. +func ProcessWorkingDirectory(val string) attribute.KeyValue { + return ProcessWorkingDirectoryKey.String(val) +} + +// Enum values for process.context_switch_type +var ( + // voluntary + // Stability: development + ProcessContextSwitchTypeVoluntary = ProcessContextSwitchTypeKey.String("voluntary") + // involuntary + // Stability: development + ProcessContextSwitchTypeInvoluntary = ProcessContextSwitchTypeKey.String("involuntary") +) + +// Enum values for process.paging.fault_type +var ( + // major + // Stability: development + ProcessPagingFaultTypeMajor = ProcessPagingFaultTypeKey.String("major") + // minor + // Stability: development + ProcessPagingFaultTypeMinor = ProcessPagingFaultTypeKey.String("minor") +) + +// Namespace: profile +const ( + // ProfileFrameTypeKey is the attribute Key conforming to the + // "profile.frame.type" semantic conventions. It describes the + // interpreter or compiler of a single frame. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cpython" + ProfileFrameTypeKey = attribute.Key("profile.frame.type") +) + +// Enum values for profile.frame.type +var ( + // [.NET] + // + // Stability: development + // + // [.NET]: https://wikipedia.org/wiki/.NET + ProfileFrameTypeDotnet = ProfileFrameTypeKey.String("dotnet") + // [JVM] + // + // Stability: development + // + // [JVM]: https://wikipedia.org/wiki/Java_virtual_machine + ProfileFrameTypeJVM = ProfileFrameTypeKey.String("jvm") + // [Kernel] + // + // Stability: development + // + // [Kernel]: https://wikipedia.org/wiki/Kernel_(operating_system) + ProfileFrameTypeKernel = ProfileFrameTypeKey.String("kernel") + // Can be one of but not limited to [C], [C++], [Go] or [Rust]. If possible, a + // more precise value MUST be used.
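+	//
+	// For instance (an illustrative note, not upstream text): when frames are
+	// known to come from a Go binary, the more specific constant defined below
+	// would be preferred over this generic native value:
+	//
+	//	frameType := semconv.ProfileFrameTypeGo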
+ // + // Stability: development + // + // [C]: https://wikipedia.org/wiki/C_(programming_language) + // [C++]: https://wikipedia.org/wiki/C%2B%2B + // [Go]: https://wikipedia.org/wiki/Go_(programming_language) + // [Rust]: https://wikipedia.org/wiki/Rust_(programming_language) + ProfileFrameTypeNative = ProfileFrameTypeKey.String("native") + // [Perl] + // + // Stability: development + // + // [Perl]: https://wikipedia.org/wiki/Perl + ProfileFrameTypePerl = ProfileFrameTypeKey.String("perl") + // [PHP] + // + // Stability: development + // + // [PHP]: https://wikipedia.org/wiki/PHP + ProfileFrameTypePHP = ProfileFrameTypeKey.String("php") + // [Python] + // + // Stability: development + // + // [Python]: https://wikipedia.org/wiki/Python_(programming_language) + ProfileFrameTypeCpython = ProfileFrameTypeKey.String("cpython") + // [Ruby] + // + // Stability: development + // + // [Ruby]: https://wikipedia.org/wiki/Ruby_(programming_language) + ProfileFrameTypeRuby = ProfileFrameTypeKey.String("ruby") + // [V8JS] + // + // Stability: development + // + // [V8JS]: https://wikipedia.org/wiki/V8_(JavaScript_engine) + ProfileFrameTypeV8JS = ProfileFrameTypeKey.String("v8js") + // [Erlang] + // + // Stability: development + // + // [Erlang]: https://en.wikipedia.org/wiki/BEAM_(Erlang_virtual_machine) + ProfileFrameTypeBeam = ProfileFrameTypeKey.String("beam") + // [Go] + // + // Stability: development + // + // [Go]: https://wikipedia.org/wiki/Go_(programming_language) + ProfileFrameTypeGo = ProfileFrameTypeKey.String("go") + // [Rust] + // + // Stability: development + // + // [Rust]: https://wikipedia.org/wiki/Rust_(programming_language) + ProfileFrameTypeRust = ProfileFrameTypeKey.String("rust") +) + +// Namespace: rpc +const ( + // RPCConnectRPCErrorCodeKey is the attribute Key conforming to the + // "rpc.connect_rpc.error_code" semantic conventions. It represents the + // [error codes] of the Connect request. Error codes are always string values. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [error codes]: https://connectrpc.com/docs/protocol/#error-codes + RPCConnectRPCErrorCodeKey = attribute.Key("rpc.connect_rpc.error_code") + + // RPCGRPCStatusCodeKey is the attribute Key conforming to the + // "rpc.grpc.status_code" semantic conventions. It represents the + // [numeric status code] of the gRPC request. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [numeric status code]: https://github.com/grpc/grpc/blob/v1.33.2/doc/statuscodes.md + RPCGRPCStatusCodeKey = attribute.Key("rpc.grpc.status_code") + + // RPCJSONRPCErrorCodeKey is the attribute Key conforming to the + // "rpc.jsonrpc.error_code" semantic conventions. It represents the `error.code` + // property of response if it is an error response. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: -32700, 100 + RPCJSONRPCErrorCodeKey = attribute.Key("rpc.jsonrpc.error_code") + + // RPCJSONRPCErrorMessageKey is the attribute Key conforming to the + // "rpc.jsonrpc.error_message" semantic conventions. It represents the + // `error.message` property of response if it is an error response.
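+	//
+	// A hedged sketch (assuming this package is imported as `semconv` and
+	// `span` is a recording trace.Span) of annotating a failed JSON-RPC call
+	// with this attribute and its companion error code:
+	//
+	//	span.SetAttributes(
+	//		semconv.RPCJSONRPCErrorCode(-32700),
+	//		semconv.RPCJSONRPCErrorMessage("Parse error"),
+	//	)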
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Parse error", "User already exists" + RPCJSONRPCErrorMessageKey = attribute.Key("rpc.jsonrpc.error_message") + + // RPCJSONRPCRequestIDKey is the attribute Key conforming to the + // "rpc.jsonrpc.request_id" semantic conventions. It represents the `id` + // property of request or response. Since protocol allows id to be int, string, + // `null` or missing (for notifications), value is expected to be cast to string + // for simplicity. Use empty string in case of `null` value. Omit entirely if + // this is a notification. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "10", "request-7", "" + RPCJSONRPCRequestIDKey = attribute.Key("rpc.jsonrpc.request_id") + + // RPCJSONRPCVersionKey is the attribute Key conforming to the + // "rpc.jsonrpc.version" semantic conventions. It represents the protocol + // version as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 + // doesn't specify this, the value can be omitted. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2.0", "1.0" + RPCJSONRPCVersionKey = attribute.Key("rpc.jsonrpc.version") + + // RPCMessageCompressedSizeKey is the attribute Key conforming to the + // "rpc.message.compressed_size" semantic conventions. It represents the + // compressed size of the message in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageCompressedSizeKey = attribute.Key("rpc.message.compressed_size") + + // RPCMessageIDKey is the attribute Key conforming to the "rpc.message.id" + // semantic conventions. It MUST be calculated as two different counters + // starting from `1`, one for sent messages and one for received messages. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: This way we guarantee that the values will be consistent between + // different implementations. + RPCMessageIDKey = attribute.Key("rpc.message.id") + + // RPCMessageTypeKey is the attribute Key conforming to the "rpc.message.type" + // semantic conventions. It represents whether this is a received or sent + // message. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageTypeKey = attribute.Key("rpc.message.type") + + // RPCMessageUncompressedSizeKey is the attribute Key conforming to the + // "rpc.message.uncompressed_size" semantic conventions. It represents the + // uncompressed size of the message in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageUncompressedSizeKey = attribute.Key("rpc.message.uncompressed_size") + + // RPCMethodKey is the attribute Key conforming to the "rpc.method" semantic + // conventions. It represents the name of the (logical) method being called, + // must be equal to the $method part in the span name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: exampleMethod + // Note: This is the logical name of the method from the RPC interface + // perspective, which can be different from the name of any implementing + // method/function.
The `code.function.name` attribute may be used to store the + // latter (e.g., method actually executing the call on the server side, RPC + // client stub method on the client side). + RPCMethodKey = attribute.Key("rpc.method") + + // RPCServiceKey is the attribute Key conforming to the "rpc.service" semantic + // conventions. It represents the full (logical) name of the service being + // called, including its package name, if applicable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myservice.EchoService + // Note: This is the logical name of the service from the RPC interface + // perspective, which can be different from the name of any implementing class. + // The `code.namespace` attribute may be used to store the latter (despite the + // attribute name, it may include a class name; e.g., class with method actually + // executing the call on the server side, RPC client stub class on the client + // side). + RPCServiceKey = attribute.Key("rpc.service") + + // RPCSystemKey is the attribute Key conforming to the "rpc.system" semantic + // conventions. It represents a string identifying the remoting system. See + // below for a list of well-known identifiers. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCSystemKey = attribute.Key("rpc.system") +) + +// RPCJSONRPCErrorCode returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.error_code" semantic conventions. It represents the `error.code` +// property of response if it is an error response. +func RPCJSONRPCErrorCode(val int) attribute.KeyValue { + return RPCJSONRPCErrorCodeKey.Int(val) +} + +// RPCJSONRPCErrorMessage returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.error_message" semantic conventions. It represents the +// `error.message` property of response if it is an error response. +func RPCJSONRPCErrorMessage(val string) attribute.KeyValue { + return RPCJSONRPCErrorMessageKey.String(val) +} + +// RPCJSONRPCRequestID returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.request_id" semantic conventions. It represents the `id` property +// of request or response. Since protocol allows id to be int, string, `null` or +// missing (for notifications), value is expected to be cast to string for +// simplicity. Use empty string in case of `null` value. Omit entirely if this is +// a notification. +func RPCJSONRPCRequestID(val string) attribute.KeyValue { + return RPCJSONRPCRequestIDKey.String(val) +} + +// RPCJSONRPCVersion returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.version" semantic conventions. It represents the protocol version +// as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 doesn't +// specify this, the value can be omitted. +func RPCJSONRPCVersion(val string) attribute.KeyValue { + return RPCJSONRPCVersionKey.String(val) +} + +// RPCMessageCompressedSize returns an attribute KeyValue conforming to the +// "rpc.message.compressed_size" semantic conventions. It represents the +// compressed size of the message in bytes. +func RPCMessageCompressedSize(val int) attribute.KeyValue { + return RPCMessageCompressedSizeKey.Int(val) +} + +// RPCMessageID returns an attribute KeyValue conforming to the "rpc.message.id" +// semantic conventions. It MUST be calculated as two different counters starting +// from `1`, one for sent messages and one for received messages.
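+//
+// As a usage sketch (assuming this package is imported as `semconv`, the
+// go.opentelemetry.io/otel/trace package is imported as `trace`, and
+// `sentCount` is a hypothetical per-span int counter), the message counters
+// are typically recorded on span events:
+//
+//	sentCount++
+//	span.AddEvent("message", trace.WithAttributes(
+//		semconv.RPCMessageTypeSent,
+//		semconv.RPCMessageID(sentCount),
+//	))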
+func RPCMessageID(val int) attribute.KeyValue { + return RPCMessageIDKey.Int(val) +} + +// RPCMessageUncompressedSize returns an attribute KeyValue conforming to the +// "rpc.message.uncompressed_size" semantic conventions. It represents the +// uncompressed size of the message in bytes. +func RPCMessageUncompressedSize(val int) attribute.KeyValue { + return RPCMessageUncompressedSizeKey.Int(val) +} + +// RPCMethod returns an attribute KeyValue conforming to the "rpc.method" +// semantic conventions. It represents the name of the (logical) method being +// called, must be equal to the $method part in the span name. +func RPCMethod(val string) attribute.KeyValue { + return RPCMethodKey.String(val) +} + +// RPCService returns an attribute KeyValue conforming to the "rpc.service" +// semantic conventions. It represents the full (logical) name of the service +// being called, including its package name, if applicable. +func RPCService(val string) attribute.KeyValue { + return RPCServiceKey.String(val) +} + +// Enum values for rpc.connect_rpc.error_code +var ( + // cancelled + // Stability: development + RPCConnectRPCErrorCodeCancelled = RPCConnectRPCErrorCodeKey.String("cancelled") + // unknown + // Stability: development + RPCConnectRPCErrorCodeUnknown = RPCConnectRPCErrorCodeKey.String("unknown") + // invalid_argument + // Stability: development + RPCConnectRPCErrorCodeInvalidArgument = RPCConnectRPCErrorCodeKey.String("invalid_argument") + // deadline_exceeded + // Stability: development + RPCConnectRPCErrorCodeDeadlineExceeded = RPCConnectRPCErrorCodeKey.String("deadline_exceeded") + // not_found + // Stability: development + RPCConnectRPCErrorCodeNotFound = RPCConnectRPCErrorCodeKey.String("not_found") + // already_exists + // Stability: development + RPCConnectRPCErrorCodeAlreadyExists = RPCConnectRPCErrorCodeKey.String("already_exists") + // permission_denied + // Stability: development + RPCConnectRPCErrorCodePermissionDenied = RPCConnectRPCErrorCodeKey.String("permission_denied") + // resource_exhausted + // Stability: development + RPCConnectRPCErrorCodeResourceExhausted = RPCConnectRPCErrorCodeKey.String("resource_exhausted") + // failed_precondition + // Stability: development + RPCConnectRPCErrorCodeFailedPrecondition = RPCConnectRPCErrorCodeKey.String("failed_precondition") + // aborted + // Stability: development + RPCConnectRPCErrorCodeAborted = RPCConnectRPCErrorCodeKey.String("aborted") + // out_of_range + // Stability: development + RPCConnectRPCErrorCodeOutOfRange = RPCConnectRPCErrorCodeKey.String("out_of_range") + // unimplemented + // Stability: development + RPCConnectRPCErrorCodeUnimplemented = RPCConnectRPCErrorCodeKey.String("unimplemented") + // internal + // Stability: development + RPCConnectRPCErrorCodeInternal = RPCConnectRPCErrorCodeKey.String("internal") + // unavailable + // Stability: development + RPCConnectRPCErrorCodeUnavailable = RPCConnectRPCErrorCodeKey.String("unavailable") + // data_loss + // Stability: development + RPCConnectRPCErrorCodeDataLoss = RPCConnectRPCErrorCodeKey.String("data_loss") + // unauthenticated + // Stability: development + RPCConnectRPCErrorCodeUnauthenticated = RPCConnectRPCErrorCodeKey.String("unauthenticated") +) + +// Enum values for rpc.grpc.status_code +var ( + // OK + // Stability: development + RPCGRPCStatusCodeOk = RPCGRPCStatusCodeKey.Int(0) + // CANCELLED + // Stability: development + RPCGRPCStatusCodeCancelled = RPCGRPCStatusCodeKey.Int(1) + // UNKNOWN + // Stability: development + RPCGRPCStatusCodeUnknown = 
RPCGRPCStatusCodeKey.Int(2) + // INVALID_ARGUMENT + // Stability: development + RPCGRPCStatusCodeInvalidArgument = RPCGRPCStatusCodeKey.Int(3) + // DEADLINE_EXCEEDED + // Stability: development + RPCGRPCStatusCodeDeadlineExceeded = RPCGRPCStatusCodeKey.Int(4) + // NOT_FOUND + // Stability: development + RPCGRPCStatusCodeNotFound = RPCGRPCStatusCodeKey.Int(5) + // ALREADY_EXISTS + // Stability: development + RPCGRPCStatusCodeAlreadyExists = RPCGRPCStatusCodeKey.Int(6) + // PERMISSION_DENIED + // Stability: development + RPCGRPCStatusCodePermissionDenied = RPCGRPCStatusCodeKey.Int(7) + // RESOURCE_EXHAUSTED + // Stability: development + RPCGRPCStatusCodeResourceExhausted = RPCGRPCStatusCodeKey.Int(8) + // FAILED_PRECONDITION + // Stability: development + RPCGRPCStatusCodeFailedPrecondition = RPCGRPCStatusCodeKey.Int(9) + // ABORTED + // Stability: development + RPCGRPCStatusCodeAborted = RPCGRPCStatusCodeKey.Int(10) + // OUT_OF_RANGE + // Stability: development + RPCGRPCStatusCodeOutOfRange = RPCGRPCStatusCodeKey.Int(11) + // UNIMPLEMENTED + // Stability: development + RPCGRPCStatusCodeUnimplemented = RPCGRPCStatusCodeKey.Int(12) + // INTERNAL + // Stability: development + RPCGRPCStatusCodeInternal = RPCGRPCStatusCodeKey.Int(13) + // UNAVAILABLE + // Stability: development + RPCGRPCStatusCodeUnavailable = RPCGRPCStatusCodeKey.Int(14) + // DATA_LOSS + // Stability: development + RPCGRPCStatusCodeDataLoss = RPCGRPCStatusCodeKey.Int(15) + // UNAUTHENTICATED + // Stability: development + RPCGRPCStatusCodeUnauthenticated = RPCGRPCStatusCodeKey.Int(16) +) + +// Enum values for rpc.message.type +var ( + // sent + // Stability: development + RPCMessageTypeSent = RPCMessageTypeKey.String("SENT") + // received + // Stability: development + RPCMessageTypeReceived = RPCMessageTypeKey.String("RECEIVED") +) + +// Enum values for rpc.system +var ( + // gRPC + // Stability: development + RPCSystemGRPC = RPCSystemKey.String("grpc") + // Java RMI + // Stability: development + RPCSystemJavaRmi = RPCSystemKey.String("java_rmi") + // .NET WCF + // Stability: development + RPCSystemDotnetWcf = RPCSystemKey.String("dotnet_wcf") + // Apache Dubbo + // Stability: development + RPCSystemApacheDubbo = RPCSystemKey.String("apache_dubbo") + // Connect RPC + // Stability: development + RPCSystemConnectRPC = RPCSystemKey.String("connect_rpc") +) + +// Namespace: security_rule +const ( + // SecurityRuleCategoryKey is the attribute Key conforming to the + // "security_rule.category" semantic conventions. It represents a categorization + // value keyword used by the entity using the rule for detection of this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Attempted Information Leak" + SecurityRuleCategoryKey = attribute.Key("security_rule.category") + + // SecurityRuleDescriptionKey is the attribute Key conforming to the + // "security_rule.description" semantic conventions. It represents the + // description of the rule generating the event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Block requests to public DNS over HTTPS / TLS protocols" + SecurityRuleDescriptionKey = attribute.Key("security_rule.description") + + // SecurityRuleLicenseKey is the attribute Key conforming to the + // "security_rule.license" semantic conventions. It represents the name of the + // license under which the rule used to generate this event is made available. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Apache 2.0" + SecurityRuleLicenseKey = attribute.Key("security_rule.license") + + // SecurityRuleNameKey is the attribute Key conforming to the + // "security_rule.name" semantic conventions. It represents the name of the rule + // or signature generating the event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "BLOCK_DNS_over_TLS" + SecurityRuleNameKey = attribute.Key("security_rule.name") + + // SecurityRuleReferenceKey is the attribute Key conforming to the + // "security_rule.reference" semantic conventions. It represents the reference + // URL to additional information about the rule used to generate this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://en.wikipedia.org/wiki/DNS_over_TLS" + // Note: The URL can point to the vendor’s documentation about the rule. If + // that’s not available, it can also be a link to a more general page + // describing this type of alert. + SecurityRuleReferenceKey = attribute.Key("security_rule.reference") + + // SecurityRuleRulesetNameKey is the attribute Key conforming to the + // "security_rule.ruleset.name" semantic conventions. It represents the name of + // the ruleset, policy, group, or parent category in which the rule used to + // generate this event is a member. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Standard_Protocol_Filters" + SecurityRuleRulesetNameKey = attribute.Key("security_rule.ruleset.name") + + // SecurityRuleUUIDKey is the attribute Key conforming to the + // "security_rule.uuid" semantic conventions. It represents a rule ID that is + // unique within the scope of a set or group of agents, observers, or other + // entities using the rule for detection of this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "550e8400-e29b-41d4-a716-446655440000", "1100110011" + SecurityRuleUUIDKey = attribute.Key("security_rule.uuid") + + // SecurityRuleVersionKey is the attribute Key conforming to the + // "security_rule.version" semantic conventions. It represents the version / + // revision of the rule being used for analysis. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.0.0" + SecurityRuleVersionKey = attribute.Key("security_rule.version") +) + +// SecurityRuleCategory returns an attribute KeyValue conforming to the +// "security_rule.category" semantic conventions. It represents a categorization +// value keyword used by the entity using the rule for detection of this event. +func SecurityRuleCategory(val string) attribute.KeyValue { + return SecurityRuleCategoryKey.String(val) +} + +// SecurityRuleDescription returns an attribute KeyValue conforming to the +// "security_rule.description" semantic conventions. It represents the +// description of the rule generating the event. +func SecurityRuleDescription(val string) attribute.KeyValue { + return SecurityRuleDescriptionKey.String(val) +} + +// SecurityRuleLicense returns an attribute KeyValue conforming to the +// "security_rule.license" semantic conventions. It represents the name of the +// license under which the rule used to generate this event is made available. 
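+//
+// A hedged sketch (assuming this package is imported as `semconv`): the
+// security_rule attributes are typically attached together to the span or log
+// record that reports a detection, e.g.:
+//
+//	attrs := []attribute.KeyValue{
+//		semconv.SecurityRuleName("BLOCK_DNS_over_TLS"),
+//		semconv.SecurityRuleCategory("Attempted Information Leak"),
+//		semconv.SecurityRuleLicense("Apache 2.0"),
+//	}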
+func SecurityRuleLicense(val string) attribute.KeyValue { + return SecurityRuleLicenseKey.String(val) +} + +// SecurityRuleName returns an attribute KeyValue conforming to the +// "security_rule.name" semantic conventions. It represents the name of the rule +// or signature generating the event. +func SecurityRuleName(val string) attribute.KeyValue { + return SecurityRuleNameKey.String(val) +} + +// SecurityRuleReference returns an attribute KeyValue conforming to the +// "security_rule.reference" semantic conventions. It represents the reference +// URL to additional information about the rule used to generate this event. +func SecurityRuleReference(val string) attribute.KeyValue { + return SecurityRuleReferenceKey.String(val) +} + +// SecurityRuleRulesetName returns an attribute KeyValue conforming to the +// "security_rule.ruleset.name" semantic conventions. It represents the name of +// the ruleset, policy, group, or parent category in which the rule used to +// generate this event is a member. +func SecurityRuleRulesetName(val string) attribute.KeyValue { + return SecurityRuleRulesetNameKey.String(val) +} + +// SecurityRuleUUID returns an attribute KeyValue conforming to the +// "security_rule.uuid" semantic conventions. It represents a rule ID that is +// unique within the scope of a set or group of agents, observers, or other +// entities using the rule for detection of this event. +func SecurityRuleUUID(val string) attribute.KeyValue { + return SecurityRuleUUIDKey.String(val) +} + +// SecurityRuleVersion returns an attribute KeyValue conforming to the +// "security_rule.version" semantic conventions. It represents the version / +// revision of the rule being used for analysis. +func SecurityRuleVersion(val string) attribute.KeyValue { + return SecurityRuleVersionKey.String(val) +} + +// Namespace: server +const ( + // ServerAddressKey is the attribute Key conforming to the "server.address" + // semantic conventions. It represents the server domain name if available + // without reverse DNS lookup; otherwise, IP address or Unix domain socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the client side, and when communicating through an + // intermediary, `server.address` SHOULD represent the server address behind any + // intermediaries, for example proxies, if it's available. + ServerAddressKey = attribute.Key("server.address") + + // ServerPortKey is the attribute Key conforming to the "server.port" semantic + // conventions. It represents the server port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 80, 8080, 443 + // Note: When observed from the client side, and when communicating through an + // intermediary, `server.port` SHOULD represent the server port behind any + // intermediaries, for example proxies, if it's available. + ServerPortKey = attribute.Key("server.port") +) + +// ServerAddress returns an attribute KeyValue conforming to the "server.address" +// semantic conventions. It represents the server domain name if available +// without reverse DNS lookup; otherwise, IP address or Unix domain socket name. +func ServerAddress(val string) attribute.KeyValue { + return ServerAddressKey.String(val) +} + +// ServerPort returns an attribute KeyValue conforming to the "server.port" +// semantic conventions. It represents the server port number. 
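+//
+// As a usage sketch (assuming this package is imported as `semconv` and
+// `span` is a recording client span): address and port are usually set as a
+// pair:
+//
+//	span.SetAttributes(
+//		semconv.ServerAddress("example.com"),
+//		semconv.ServerPort(443),
+//	)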
+func ServerPort(val int) attribute.KeyValue { + return ServerPortKey.Int(val) +} + +// Namespace: service +const ( + // ServiceInstanceIDKey is the attribute Key conforming to the + // "service.instance.id" semantic conventions. It represents the string ID of + // the service instance. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "627cc493-f310-47de-96bd-71410b7dec09" + // Note: MUST be unique for each instance of the same + // `service.namespace,service.name` pair (in other words + // `service.namespace,service.name,service.instance.id` triplet MUST be globally + // unique). The ID helps to + // distinguish instances of the same service that exist at the same time (e.g. + // instances of a horizontally scaled + // service). + // + // Implementations, such as SDKs, are recommended to generate a random Version 1 + // or Version 4 [RFC + // 4122] UUID, but are free to use an inherent unique ID as + // the source of + // this value if stability is desirable. In that case, the ID SHOULD be used as + // source of a UUID Version 5 and + // SHOULD use the following UUID as the namespace: + // `4d63009a-8d0f-11ee-aad7-4c796ed8e320`. + // + // UUIDs are typically recommended, as only an opaque value for the purposes of + // identifying a service instance is + // needed. Similar to what can be seen in the man page for the + // [`/etc/machine-id`] file, the underlying + // data, such as pod name and namespace should be treated as confidential, being + // the user's choice to expose it + // or not via another resource attribute. + // + // For applications running behind an application server (like unicorn), we do + // not recommend using one identifier + // for all processes participating in the application. Instead, it's recommended + // that each division (e.g. a worker + // thread in unicorn) have its own instance.id. + // + // It's not recommended for a Collector to set `service.instance.id` if it can't + // unambiguously determine the + // service instance that is generating that telemetry. For instance, creating a + // UUID based on `pod.name` will + // likely be wrong, as the Collector might not know from which container within + // that pod the telemetry originated. + // However, Collectors can set the `service.instance.id` if they can + // unambiguously determine the service instance + // for that telemetry. This is typically the case for scraping receivers, as + // they know the target address and + // port. + // + // [RFC + // 4122]: https://www.ietf.org/rfc/rfc4122.txt + // [`/etc/machine-id`]: https://www.freedesktop.org/software/systemd/man/latest/machine-id.html + ServiceInstanceIDKey = attribute.Key("service.instance.id") + + // ServiceNameKey is the attribute Key conforming to the "service.name" semantic + // conventions. It represents the logical name of the service. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "shoppingcart" + // Note: MUST be the same for all instances of horizontally scaled services. If + // the value was not specified, SDKs MUST fallback to `unknown_service:` + // concatenated with [`process.executable.name`], e.g. `unknown_service:bash`. + // If `process.executable.name` is not available, the value MUST be set to + // `unknown_service`. + // + // [`process.executable.name`]: process.md + ServiceNameKey = attribute.Key("service.name") + + // ServiceNamespaceKey is the attribute Key conforming to the + // "service.namespace" semantic conventions.
It represents a namespace for + // `service.name`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Shop" + // Note: A string value having a meaning that helps to distinguish a group of + // services, for example the team name that owns a group of services. + // `service.name` is expected to be unique within the same namespace. If + // `service.namespace` is not specified in the Resource then `service.name` is + // expected to be unique for all services that have no explicit namespace + // defined (so the empty/unspecified namespace is simply one more valid + // namespace). Zero-length namespace string is assumed equal to unspecified + // namespace. + ServiceNamespaceKey = attribute.Key("service.namespace") + + // ServiceVersionKey is the attribute Key conforming to the "service.version" + // semantic conventions. It represents the version string of the service API or + // implementation. The format is not defined by these conventions. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "2.0.0", "a01dbef8a" + ServiceVersionKey = attribute.Key("service.version") +) + +// ServiceInstanceID returns an attribute KeyValue conforming to the +// "service.instance.id" semantic conventions. It represents the string ID of the +// service instance. +func ServiceInstanceID(val string) attribute.KeyValue { + return ServiceInstanceIDKey.String(val) +} + +// ServiceName returns an attribute KeyValue conforming to the "service.name" +// semantic conventions. It represents the logical name of the service. +func ServiceName(val string) attribute.KeyValue { + return ServiceNameKey.String(val) +} + +// ServiceNamespace returns an attribute KeyValue conforming to the +// "service.namespace" semantic conventions. It represents a namespace for +// `service.name`. +func ServiceNamespace(val string) attribute.KeyValue { + return ServiceNamespaceKey.String(val) +} + +// ServiceVersion returns an attribute KeyValue conforming to the +// "service.version" semantic conventions. It represents the version string of +// the service API or implementation. The format is not defined by these +// conventions. +func ServiceVersion(val string) attribute.KeyValue { + return ServiceVersionKey.String(val) +} + +// Namespace: session +const ( + // SessionIDKey is the attribute Key conforming to the "session.id" semantic + // conventions. It represents a unique id to identify a session. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 00112233-4455-6677-8899-aabbccddeeff + SessionIDKey = attribute.Key("session.id") + + // SessionPreviousIDKey is the attribute Key conforming to the + // "session.previous_id" semantic conventions. It represents the previous + // `session.id` for this user, when known. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 00112233-4455-6677-8899-aabbccddeeff + SessionPreviousIDKey = attribute.Key("session.previous_id") +) + +// SessionID returns an attribute KeyValue conforming to the "session.id" +// semantic conventions. It represents a unique id to identify a session. +func SessionID(val string) attribute.KeyValue { + return SessionIDKey.String(val) +} + +// SessionPreviousID returns an attribute KeyValue conforming to the +// "session.previous_id" semantic conventions. It represents the previous +// `session.id` for this user, when known. 
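+//
+// A hedged sketch (assuming this package is imported as `semconv`; `newID`
+// and `previousID` are hypothetical string variables): recording both IDs
+// lets backends stitch a renewed session to its predecessor:
+//
+//	span.SetAttributes(
+//		semconv.SessionID(newID),
+//		semconv.SessionPreviousID(previousID),
+//	)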
+func SessionPreviousID(val string) attribute.KeyValue { + return SessionPreviousIDKey.String(val) +} + +// Namespace: signalr +const ( + // SignalRConnectionStatusKey is the attribute Key conforming to the + // "signalr.connection.status" semantic conventions. It represents the SignalR + // HTTP connection closure status. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "app_shutdown", "timeout" + SignalRConnectionStatusKey = attribute.Key("signalr.connection.status") + + // SignalRTransportKey is the attribute Key conforming to the + // "signalr.transport" semantic conventions. It represents the + // [SignalR transport type]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "web_sockets", "long_polling" + // + // [SignalR transport type]: https://github.com/dotnet/aspnetcore/blob/main/src/SignalR/docs/specs/TransportProtocols.md + SignalRTransportKey = attribute.Key("signalr.transport") +) + +// Enum values for signalr.connection.status +var ( + // The connection was closed normally. + // Stability: stable + SignalRConnectionStatusNormalClosure = SignalRConnectionStatusKey.String("normal_closure") + // The connection was closed due to a timeout. + // Stability: stable + SignalRConnectionStatusTimeout = SignalRConnectionStatusKey.String("timeout") + // The connection was closed because the app is shutting down. + // Stability: stable + SignalRConnectionStatusAppShutdown = SignalRConnectionStatusKey.String("app_shutdown") +) + +// Enum values for signalr.transport +var ( + // ServerSentEvents protocol + // Stability: stable + SignalRTransportServerSentEvents = SignalRTransportKey.String("server_sent_events") + // LongPolling protocol + // Stability: stable + SignalRTransportLongPolling = SignalRTransportKey.String("long_polling") + // WebSockets protocol + // Stability: stable + SignalRTransportWebSockets = SignalRTransportKey.String("web_sockets") +) + +// Namespace: source +const ( + // SourceAddressKey is the attribute Key conforming to the "source.address" + // semantic conventions. It represents the source address - domain name if + // available without reverse DNS lookup; otherwise, IP address or Unix domain + // socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "source.example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the destination side, and when communicating through + // an intermediary, `source.address` SHOULD represent the source address behind + // any intermediaries, for example proxies, if it's available. + SourceAddressKey = attribute.Key("source.address") + + // SourcePortKey is the attribute Key conforming to the "source.port" semantic + // conventions. It represents the source port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 3389, 2888 + SourcePortKey = attribute.Key("source.port") +) + +// SourceAddress returns an attribute KeyValue conforming to the "source.address" +// semantic conventions. It represents the source address - domain name if +// available without reverse DNS lookup; otherwise, IP address or Unix domain +// socket name. +func SourceAddress(val string) attribute.KeyValue { + return SourceAddressKey.String(val) +} + +// SourcePort returns an attribute KeyValue conforming to the "source.port" +// semantic conventions. It represents the source port number.
+func SourcePort(val int) attribute.KeyValue { + return SourcePortKey.Int(val) +} + +// Namespace: system +const ( + // SystemCPULogicalNumberKey is the attribute Key conforming to the + // "system.cpu.logical_number" semantic conventions. It is deprecated; use + // `cpu.logical_number` instead. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + SystemCPULogicalNumberKey = attribute.Key("system.cpu.logical_number") + + // SystemDeviceKey is the attribute Key conforming to the "system.device" + // semantic conventions. It represents the device identifier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "(identifier)" + SystemDeviceKey = attribute.Key("system.device") + + // SystemFilesystemModeKey is the attribute Key conforming to the + // "system.filesystem.mode" semantic conventions. It represents the filesystem + // mode. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "rw, ro" + SystemFilesystemModeKey = attribute.Key("system.filesystem.mode") + + // SystemFilesystemMountpointKey is the attribute Key conforming to the + // "system.filesystem.mountpoint" semantic conventions. It represents the + // filesystem mount path. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/mnt/data" + SystemFilesystemMountpointKey = attribute.Key("system.filesystem.mountpoint") + + // SystemFilesystemStateKey is the attribute Key conforming to the + // "system.filesystem.state" semantic conventions. It represents the filesystem + // state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "used" + SystemFilesystemStateKey = attribute.Key("system.filesystem.state") + + // SystemFilesystemTypeKey is the attribute Key conforming to the + // "system.filesystem.type" semantic conventions. It represents the filesystem + // type. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ext4" + SystemFilesystemTypeKey = attribute.Key("system.filesystem.type") + + // SystemMemoryStateKey is the attribute Key conforming to the + // "system.memory.state" semantic conventions. It represents the memory state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "free", "cached" + SystemMemoryStateKey = attribute.Key("system.memory.state") + + // SystemPagingDirectionKey is the attribute Key conforming to the + // "system.paging.direction" semantic conventions. It represents the paging + // access direction. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "in" + SystemPagingDirectionKey = attribute.Key("system.paging.direction") + + // SystemPagingStateKey is the attribute Key conforming to the + // "system.paging.state" semantic conventions. It represents the memory paging + // state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "free" + SystemPagingStateKey = attribute.Key("system.paging.state") + + // SystemPagingTypeKey is the attribute Key conforming to the + // "system.paging.type" semantic conventions. It represents the memory paging + // type.
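+
+// A minimal usage sketch for the filesystem attributes above, assuming this
+// package is imported as semconv and the go.opentelemetry.io/otel/metric
+// package as metric; enum values such as SystemFilesystemStateUsed (defined
+// below) are ready-made KeyValues:
+//
+//	opt := metric.WithAttributes(
+//		semconv.SystemDevice("/dev/sda1"),
+//		semconv.SystemFilesystemMountpoint("/mnt/data"),
+//		semconv.SystemFilesystemStateUsed,
+//	)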
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "minor" + SystemPagingTypeKey = attribute.Key("system.paging.type") + + // SystemProcessStatusKey is the attribute Key conforming to the + // "system.process.status" semantic conventions. It represents the process + // state, e.g., [Linux Process State Codes]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "running" + // + // [Linux Process State Codes]: https://man7.org/linux/man-pages/man1/ps.1.html#PROCESS_STATE_CODES + SystemProcessStatusKey = attribute.Key("system.process.status") +) + +// SystemCPULogicalNumber returns an attribute KeyValue conforming to the +// "system.cpu.logical_number" semantic conventions. It is deprecated; use +// `cpu.logical_number` instead. +func SystemCPULogicalNumber(val int) attribute.KeyValue { + return SystemCPULogicalNumberKey.Int(val) +} + +// SystemDevice returns an attribute KeyValue conforming to the "system.device" +// semantic conventions. It represents the device identifier. +func SystemDevice(val string) attribute.KeyValue { + return SystemDeviceKey.String(val) +} + +// SystemFilesystemMode returns an attribute KeyValue conforming to the +// "system.filesystem.mode" semantic conventions. It represents the filesystem +// mode. +func SystemFilesystemMode(val string) attribute.KeyValue { + return SystemFilesystemModeKey.String(val) +} + +// SystemFilesystemMountpoint returns an attribute KeyValue conforming to the +// "system.filesystem.mountpoint" semantic conventions. It represents the +// filesystem mount path. +func SystemFilesystemMountpoint(val string) attribute.KeyValue { + return SystemFilesystemMountpointKey.String(val) +} + +// Enum values for system.filesystem.state +var ( + // used + // Stability: development + SystemFilesystemStateUsed = SystemFilesystemStateKey.String("used") + // free + // Stability: development + SystemFilesystemStateFree = SystemFilesystemStateKey.String("free") + // reserved + // Stability: development + SystemFilesystemStateReserved = SystemFilesystemStateKey.String("reserved") +) + +// Enum values for system.filesystem.type +var ( + // fat32 + // Stability: development + SystemFilesystemTypeFat32 = SystemFilesystemTypeKey.String("fat32") + // exfat + // Stability: development + SystemFilesystemTypeExfat = SystemFilesystemTypeKey.String("exfat") + // ntfs + // Stability: development + SystemFilesystemTypeNtfs = SystemFilesystemTypeKey.String("ntfs") + // refs + // Stability: development + SystemFilesystemTypeRefs = SystemFilesystemTypeKey.String("refs") + // hfsplus + // Stability: development + SystemFilesystemTypeHfsplus = SystemFilesystemTypeKey.String("hfsplus") + // ext4 + // Stability: development + SystemFilesystemTypeExt4 = SystemFilesystemTypeKey.String("ext4") +) + +// Enum values for system.memory.state +var ( + // used + // Stability: development + SystemMemoryStateUsed = SystemMemoryStateKey.String("used") + // free + // Stability: development + SystemMemoryStateFree = SystemMemoryStateKey.String("free") + // Deprecated: Removed, report shared memory usage with + // `metric.system.memory.shared` metric.
+ SystemMemoryStateShared = SystemMemoryStateKey.String("shared") + // buffers + // Stability: development + SystemMemoryStateBuffers = SystemMemoryStateKey.String("buffers") + // cached + // Stability: development + SystemMemoryStateCached = SystemMemoryStateKey.String("cached") +) + +// Enum values for system.paging.direction +var ( + // in + // Stability: development + SystemPagingDirectionIn = SystemPagingDirectionKey.String("in") + // out + // Stability: development + SystemPagingDirectionOut = SystemPagingDirectionKey.String("out") +) + +// Enum values for system.paging.state +var ( + // used + // Stability: development + SystemPagingStateUsed = SystemPagingStateKey.String("used") + // free + // Stability: development + SystemPagingStateFree = SystemPagingStateKey.String("free") +) + +// Enum values for system.paging.type +var ( + // major + // Stability: development + SystemPagingTypeMajor = SystemPagingTypeKey.String("major") + // minor + // Stability: development + SystemPagingTypeMinor = SystemPagingTypeKey.String("minor") +) + +// Enum values for system.process.status +var ( + // running + // Stability: development + SystemProcessStatusRunning = SystemProcessStatusKey.String("running") + // sleeping + // Stability: development + SystemProcessStatusSleeping = SystemProcessStatusKey.String("sleeping") + // stopped + // Stability: development + SystemProcessStatusStopped = SystemProcessStatusKey.String("stopped") + // defunct + // Stability: development + SystemProcessStatusDefunct = SystemProcessStatusKey.String("defunct") +) + +// Namespace: telemetry +const ( + // TelemetryDistroNameKey is the attribute Key conforming to the + // "telemetry.distro.name" semantic conventions. It represents the name of the + // auto instrumentation agent or distribution, if used. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "parts-unlimited-java" + // Note: Official auto instrumentation agents and distributions SHOULD set the + // `telemetry.distro.name` attribute to + // a string starting with `opentelemetry-`, e.g. + // `opentelemetry-java-instrumentation`. + TelemetryDistroNameKey = attribute.Key("telemetry.distro.name") + + // TelemetryDistroVersionKey is the attribute Key conforming to the + // "telemetry.distro.version" semantic conventions. It represents the version + // string of the auto instrumentation agent or distribution, if used. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.2.3" + TelemetryDistroVersionKey = attribute.Key("telemetry.distro.version") + + // TelemetrySDKLanguageKey is the attribute Key conforming to the + // "telemetry.sdk.language" semantic conventions. It represents the language of + // the telemetry SDK. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: + TelemetrySDKLanguageKey = attribute.Key("telemetry.sdk.language") + + // TelemetrySDKNameKey is the attribute Key conforming to the + // "telemetry.sdk.name" semantic conventions. It represents the name of the + // telemetry SDK as defined above. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "opentelemetry" + // Note: The OpenTelemetry SDK MUST set the `telemetry.sdk.name` attribute to + // `opentelemetry`. 
+ // If another SDK, like a fork or a vendor-provided implementation, is used, + // this SDK MUST set the + // `telemetry.sdk.name` attribute to the fully-qualified class or module name of + // this SDK's main entry point + // or another suitable identifier depending on the language. + // The identifier `opentelemetry` is reserved and MUST NOT be used in this case. + // All custom identifiers SHOULD be stable across different versions of an + // implementation. + TelemetrySDKNameKey = attribute.Key("telemetry.sdk.name") + + // TelemetrySDKVersionKey is the attribute Key conforming to the + // "telemetry.sdk.version" semantic conventions. It represents the version + // string of the telemetry SDK. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "1.2.3" + TelemetrySDKVersionKey = attribute.Key("telemetry.sdk.version") +) + +// TelemetryDistroName returns an attribute KeyValue conforming to the +// "telemetry.distro.name" semantic conventions. It represents the name of the +// auto instrumentation agent or distribution, if used. +func TelemetryDistroName(val string) attribute.KeyValue { + return TelemetryDistroNameKey.String(val) +} + +// TelemetryDistroVersion returns an attribute KeyValue conforming to the +// "telemetry.distro.version" semantic conventions. It represents the version +// string of the auto instrumentation agent or distribution, if used. +func TelemetryDistroVersion(val string) attribute.KeyValue { + return TelemetryDistroVersionKey.String(val) +} + +// TelemetrySDKName returns an attribute KeyValue conforming to the +// "telemetry.sdk.name" semantic conventions. It represents the name of the +// telemetry SDK as defined above. +func TelemetrySDKName(val string) attribute.KeyValue { + return TelemetrySDKNameKey.String(val) +} + +// TelemetrySDKVersion returns an attribute KeyValue conforming to the +// "telemetry.sdk.version" semantic conventions. It represents the version string +// of the telemetry SDK. +func TelemetrySDKVersion(val string) attribute.KeyValue { + return TelemetrySDKVersionKey.String(val) +} + +// Enum values for telemetry.sdk.language +var ( + // cpp + // Stability: stable + TelemetrySDKLanguageCPP = TelemetrySDKLanguageKey.String("cpp") + // dotnet + // Stability: stable + TelemetrySDKLanguageDotnet = TelemetrySDKLanguageKey.String("dotnet") + // erlang + // Stability: stable + TelemetrySDKLanguageErlang = TelemetrySDKLanguageKey.String("erlang") + // go + // Stability: stable + TelemetrySDKLanguageGo = TelemetrySDKLanguageKey.String("go") + // java + // Stability: stable + TelemetrySDKLanguageJava = TelemetrySDKLanguageKey.String("java") + // nodejs + // Stability: stable + TelemetrySDKLanguageNodejs = TelemetrySDKLanguageKey.String("nodejs") + // php + // Stability: stable + TelemetrySDKLanguagePHP = TelemetrySDKLanguageKey.String("php") + // python + // Stability: stable + TelemetrySDKLanguagePython = TelemetrySDKLanguageKey.String("python") + // ruby + // Stability: stable + TelemetrySDKLanguageRuby = TelemetrySDKLanguageKey.String("ruby") + // rust + // Stability: stable + TelemetrySDKLanguageRust = TelemetrySDKLanguageKey.String("rust") + // swift + // Stability: stable + TelemetrySDKLanguageSwift = TelemetrySDKLanguageKey.String("swift") + // webjs + // Stability: stable + TelemetrySDKLanguageWebJS = TelemetrySDKLanguageKey.String("webjs") +) + +// Namespace: test +const ( + // TestCaseNameKey is the attribute Key conforming to the "test.case.name" + // semantic conventions. 
It represents the fully qualified human readable name + // of the [test case]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "org.example.TestCase1.test1", "example/tests/TestCase1.test1", + // "ExampleTestCase1_test1" + // + // [test case]: https://wikipedia.org/wiki/Test_case + TestCaseNameKey = attribute.Key("test.case.name") + + // TestCaseResultStatusKey is the attribute Key conforming to the + // "test.case.result.status" semantic conventions. It represents the status of + // the actual test case result from test execution. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pass", "fail" + TestCaseResultStatusKey = attribute.Key("test.case.result.status") + + // TestSuiteNameKey is the attribute Key conforming to the "test.suite.name" + // semantic conventions. It represents the human readable name of a [test suite] + // . + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TestSuite1" + // + // [test suite]: https://wikipedia.org/wiki/Test_suite + TestSuiteNameKey = attribute.Key("test.suite.name") + + // TestSuiteRunStatusKey is the attribute Key conforming to the + // "test.suite.run.status" semantic conventions. It represents the status of the + // test suite run. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "success", "failure", "skipped", "aborted", "timed_out", + // "in_progress" + TestSuiteRunStatusKey = attribute.Key("test.suite.run.status") +) + +// TestCaseName returns an attribute KeyValue conforming to the "test.case.name" +// semantic conventions. It represents the fully qualified human readable name of +// the [test case]. +// +// [test case]: https://wikipedia.org/wiki/Test_case +func TestCaseName(val string) attribute.KeyValue { + return TestCaseNameKey.String(val) +} + +// TestSuiteName returns an attribute KeyValue conforming to the +// "test.suite.name" semantic conventions. It represents the human readable name +// of a [test suite]. +// +// [test suite]: https://wikipedia.org/wiki/Test_suite +func TestSuiteName(val string) attribute.KeyValue { + return TestSuiteNameKey.String(val) +} + +// Enum values for test.case.result.status +var ( + // pass + // Stability: development + TestCaseResultStatusPass = TestCaseResultStatusKey.String("pass") + // fail + // Stability: development + TestCaseResultStatusFail = TestCaseResultStatusKey.String("fail") +) + +// Enum values for test.suite.run.status +var ( + // success + // Stability: development + TestSuiteRunStatusSuccess = TestSuiteRunStatusKey.String("success") + // failure + // Stability: development + TestSuiteRunStatusFailure = TestSuiteRunStatusKey.String("failure") + // skipped + // Stability: development + TestSuiteRunStatusSkipped = TestSuiteRunStatusKey.String("skipped") + // aborted + // Stability: development + TestSuiteRunStatusAborted = TestSuiteRunStatusKey.String("aborted") + // timed_out + // Stability: development + TestSuiteRunStatusTimedOut = TestSuiteRunStatusKey.String("timed_out") + // in_progress + // Stability: development + TestSuiteRunStatusInProgress = TestSuiteRunStatusKey.String("in_progress") +) + +// Namespace: thread +const ( + // ThreadIDKey is the attribute Key conforming to the "thread.id" semantic + // conventions. It represents the current "managed" thread ID (as opposed to OS + // thread ID). 
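+
+// A minimal usage sketch for the test attributes above, assuming this package
+// is imported as semconv and an active trace.Span named span; enum values such
+// as TestCaseResultStatusPass can be passed alongside the helper functions:
+//
+//	span.SetAttributes(
+//		semconv.TestSuiteName("TestSuite1"),
+//		semconv.TestCaseName("org.example.TestCase1.test1"),
+//		semconv.TestCaseResultStatusPass,
+//	)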
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + ThreadIDKey = attribute.Key("thread.id") + + // ThreadNameKey is the attribute Key conforming to the "thread.name" semantic + // conventions. It represents the current thread name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: main + ThreadNameKey = attribute.Key("thread.name") +) + +// ThreadID returns an attribute KeyValue conforming to the "thread.id" semantic +// conventions. It represents the current "managed" thread ID (as opposed to OS +// thread ID). +func ThreadID(val int) attribute.KeyValue { + return ThreadIDKey.Int(val) +} + +// ThreadName returns an attribute KeyValue conforming to the "thread.name" +// semantic conventions. It represents the current thread name. +func ThreadName(val string) attribute.KeyValue { + return ThreadNameKey.String(val) +} + +// Namespace: tls +const ( + // TLSCipherKey is the attribute Key conforming to the "tls.cipher" semantic + // conventions. It represents the string indicating the [cipher] used during the + // current connection. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + // "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256" + // Note: The values allowed for `tls.cipher` MUST be one of the `Descriptions` + // of the [registered TLS Cipher Suites]. + // + // [cipher]: https://datatracker.ietf.org/doc/html/rfc5246#appendix-A.5 + // [registered TLS Cipher Suites]: https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#table-tls-parameters-4 + TLSCipherKey = attribute.Key("tls.cipher") + + // TLSClientCertificateKey is the attribute Key conforming to the + // "tls.client.certificate" semantic conventions. It represents the PEM-encoded + // stand-alone certificate offered by the client. This is usually + // mutually-exclusive of `client.certificate_chain` since this value also exists + // in that list. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII..." + TLSClientCertificateKey = attribute.Key("tls.client.certificate") + + // TLSClientCertificateChainKey is the attribute Key conforming to the + // "tls.client.certificate_chain" semantic conventions. It represents the array + // of PEM-encoded certificates that make up the certificate chain offered by the + // client. This is usually mutually-exclusive of `client.certificate` since that + // value should be the first certificate in the chain. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII...", "MI..." + TLSClientCertificateChainKey = attribute.Key("tls.client.certificate_chain") + + // TLSClientHashMd5Key is the attribute Key conforming to the + // "tls.client.hash.md5" semantic conventions. It represents the certificate + // fingerprint using the MD5 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0F76C7F2C55BFD7D8E8B8F4BFBF0C9EC" + TLSClientHashMd5Key = attribute.Key("tls.client.hash.md5") + + // TLSClientHashSha1Key is the attribute Key conforming to the + // "tls.client.hash.sha1" semantic conventions.
It represents the certificate + // fingerprint using the SHA1 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9E393D93138888D288266C2D915214D1D1CCEB2A" + TLSClientHashSha1Key = attribute.Key("tls.client.hash.sha1") + + // TLSClientHashSha256Key is the attribute Key conforming to the + // "tls.client.hash.sha256" semantic conventions. It represents the certificate + // fingerprint using the SHA256 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0687F666A054EF17A08E2F2162EAB4CBC0D265E1D7875BE74BF3C712CA92DAF0" + TLSClientHashSha256Key = attribute.Key("tls.client.hash.sha256") + + // TLSClientIssuerKey is the attribute Key conforming to the "tls.client.issuer" + // semantic conventions. It represents the distinguished name of [subject] of + // the issuer of the x.509 certificate presented by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=Example Root CA, OU=Infrastructure Team, DC=example, DC=com" + // + // [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 + TLSClientIssuerKey = attribute.Key("tls.client.issuer") + + // TLSClientJa3Key is the attribute Key conforming to the "tls.client.ja3" + // semantic conventions. It represents a hash that identifies clients based on + // how they perform an SSL/TLS handshake. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "d4e5b18d6b55c71272893221c96ba240" + TLSClientJa3Key = attribute.Key("tls.client.ja3") + + // TLSClientNotAfterKey is the attribute Key conforming to the + // "tls.client.not_after" semantic conventions. It represents the date/Time + // indicating when client certificate is no longer considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T00:00:00.000Z" + TLSClientNotAfterKey = attribute.Key("tls.client.not_after") + + // TLSClientNotBeforeKey is the attribute Key conforming to the + // "tls.client.not_before" semantic conventions. It represents the date/Time + // indicating when client certificate is first considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1970-01-01T00:00:00.000Z" + TLSClientNotBeforeKey = attribute.Key("tls.client.not_before") + + // TLSClientSubjectKey is the attribute Key conforming to the + // "tls.client.subject" semantic conventions. It represents the distinguished + // name of subject of the x.509 certificate presented by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=myclient, OU=Documentation Team, DC=example, DC=com" + TLSClientSubjectKey = attribute.Key("tls.client.subject") + + // TLSClientSupportedCiphersKey is the attribute Key conforming to the + // "tls.client.supported_ciphers" semantic conventions. It represents the array + // of ciphers offered by the client during the client hello. 
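+
+// A minimal usage sketch for the TLS client attributes above, assuming this
+// package is imported as semconv and an active trace.Span named span (cipher
+// names taken from the Examples):
+//
+//	span.SetAttributes(
+//		semconv.TLSClientSubject("CN=myclient, OU=Documentation Team, DC=example, DC=com"),
+//		semconv.TLSClientSupportedCiphers(
+//			"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
+//			"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
+//		),
+//	)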
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + // "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384" + TLSClientSupportedCiphersKey = attribute.Key("tls.client.supported_ciphers") + + // TLSCurveKey is the attribute Key conforming to the "tls.curve" semantic + // conventions. It represents the string indicating the curve used for the given + // cipher, when applicable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "secp256r1" + TLSCurveKey = attribute.Key("tls.curve") + + // TLSEstablishedKey is the attribute Key conforming to the "tls.established" + // semantic conventions. It represents the boolean flag indicating if the TLS + // negotiation was successful and transitioned to an encrypted tunnel. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: true + TLSEstablishedKey = attribute.Key("tls.established") + + // TLSNextProtocolKey is the attribute Key conforming to the "tls.next_protocol" + // semantic conventions. It represents the string indicating the protocol being + // tunneled. Per the values in the [IANA registry], this string should be lower + // case. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "http/1.1" + // + // [IANA registry]: https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + TLSNextProtocolKey = attribute.Key("tls.next_protocol") + + // TLSProtocolNameKey is the attribute Key conforming to the "tls.protocol.name" + // semantic conventions. It represents the normalized lowercase protocol name + // parsed from original string of the negotiated [SSL/TLS protocol version]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values + TLSProtocolNameKey = attribute.Key("tls.protocol.name") + + // TLSProtocolVersionKey is the attribute Key conforming to the + // "tls.protocol.version" semantic conventions. It represents the numeric part + // of the version parsed from the original string of the negotiated + // [SSL/TLS protocol version]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.2", "3" + // + // [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values + TLSProtocolVersionKey = attribute.Key("tls.protocol.version") + + // TLSResumedKey is the attribute Key conforming to the "tls.resumed" semantic + // conventions. It represents the boolean flag indicating if this TLS connection + // was resumed from an existing TLS negotiation. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: true + TLSResumedKey = attribute.Key("tls.resumed") + + // TLSServerCertificateKey is the attribute Key conforming to the + // "tls.server.certificate" semantic conventions. It represents the PEM-encoded + // stand-alone certificate offered by the server. This is usually + // mutually-exclusive of `server.certificate_chain` since this value also exists + // in that list. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII..." 
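+
+// A minimal usage sketch for the connection-level TLS attributes above,
+// assuming this package is imported as semconv and an active trace.Span named
+// span; TLSProtocolNameTLS is one of the tls.protocol.name enum values defined
+// further below:
+//
+//	span.SetAttributes(
+//		semconv.TLSEstablished(true),
+//		semconv.TLSProtocolNameTLS,
+//		semconv.TLSProtocolVersion("1.2"),
+//		semconv.TLSCipher("TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"),
+//	)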
+ TLSServerCertificateKey = attribute.Key("tls.server.certificate") + + // TLSServerCertificateChainKey is the attribute Key conforming to the + // "tls.server.certificate_chain" semantic conventions. It represents the array + // of PEM-encoded certificates that make up the certificate chain offered by the + // server. This is usually mutually-exclusive of `server.certificate` since that + // value should be the first certificate in the chain. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII...", "MI..." + TLSServerCertificateChainKey = attribute.Key("tls.server.certificate_chain") + + // TLSServerHashMd5Key is the attribute Key conforming to the + // "tls.server.hash.md5" semantic conventions. It represents the certificate + // fingerprint using the MD5 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0F76C7F2C55BFD7D8E8B8F4BFBF0C9EC" + TLSServerHashMd5Key = attribute.Key("tls.server.hash.md5") + + // TLSServerHashSha1Key is the attribute Key conforming to the + // "tls.server.hash.sha1" semantic conventions. It represents the certificate + // fingerprint using the SHA1 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9E393D93138888D288266C2D915214D1D1CCEB2A" + TLSServerHashSha1Key = attribute.Key("tls.server.hash.sha1") + + // TLSServerHashSha256Key is the attribute Key conforming to the + // "tls.server.hash.sha256" semantic conventions. It represents the certificate + // fingerprint using the SHA256 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0687F666A054EF17A08E2F2162EAB4CBC0D265E1D7875BE74BF3C712CA92DAF0" + TLSServerHashSha256Key = attribute.Key("tls.server.hash.sha256") + + // TLSServerIssuerKey is the attribute Key conforming to the "tls.server.issuer" + // semantic conventions. It represents the distinguished name of [subject] of + // the issuer of the x.509 certificate presented by the server. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=Example Root CA, OU=Infrastructure Team, DC=example, DC=com" + // + // [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 + TLSServerIssuerKey = attribute.Key("tls.server.issuer") + + // TLSServerJa3sKey is the attribute Key conforming to the "tls.server.ja3s" + // semantic conventions. It represents a hash that identifies servers based on + // how they perform an SSL/TLS handshake. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "d4e5b18d6b55c71272893221c96ba240" + TLSServerJa3sKey = attribute.Key("tls.server.ja3s") + + // TLSServerNotAfterKey is the attribute Key conforming to the + // "tls.server.not_after" semantic conventions. It represents the date/Time + // indicating when server certificate is no longer considered valid.
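+
+// A minimal usage sketch for the TLS server attributes above, assuming this
+// package is imported as semconv and an active trace.Span named span (the
+// fingerprint and timestamp are placeholders from the Examples):
+//
+//	span.SetAttributes(
+//		semconv.TLSServerHashSha256("0687F666A054EF17A08E2F2162EAB4CBC0D265E1D7875BE74BF3C712CA92DAF0"),
+//		semconv.TLSServerNotAfter("2021-01-01T00:00:00.000Z"),
+//	)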
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T00:00:00.000Z" + TLSServerNotAfterKey = attribute.Key("tls.server.not_after") + + // TLSServerNotBeforeKey is the attribute Key conforming to the + // "tls.server.not_before" semantic conventions. It represents the date/Time + // indicating when server certificate is first considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1970-01-01T00:00:00.000Z" + TLSServerNotBeforeKey = attribute.Key("tls.server.not_before") + + // TLSServerSubjectKey is the attribute Key conforming to the + // "tls.server.subject" semantic conventions. It represents the distinguished + // name of subject of the x.509 certificate presented by the server. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=myserver, OU=Documentation Team, DC=example, DC=com" + TLSServerSubjectKey = attribute.Key("tls.server.subject") +) + +// TLSCipher returns an attribute KeyValue conforming to the "tls.cipher" +// semantic conventions. It represents the string indicating the [cipher] used +// during the current connection. +// +// [cipher]: https://datatracker.ietf.org/doc/html/rfc5246#appendix-A.5 +func TLSCipher(val string) attribute.KeyValue { + return TLSCipherKey.String(val) +} + +// TLSClientCertificate returns an attribute KeyValue conforming to the +// "tls.client.certificate" semantic conventions. It represents the PEM-encoded +// stand-alone certificate offered by the client. This is usually +// mutually-exclusive of `client.certificate_chain` since this value also exists +// in that list. +func TLSClientCertificate(val string) attribute.KeyValue { + return TLSClientCertificateKey.String(val) +} + +// TLSClientCertificateChain returns an attribute KeyValue conforming to the +// "tls.client.certificate_chain" semantic conventions. It represents the array +// of PEM-encoded certificates that make up the certificate chain offered by the +// client. This is usually mutually-exclusive of `client.certificate` since that +// value should be the first certificate in the chain. +func TLSClientCertificateChain(val ...string) attribute.KeyValue { + return TLSClientCertificateChainKey.StringSlice(val) +} + +// TLSClientHashMd5 returns an attribute KeyValue conforming to the +// "tls.client.hash.md5" semantic conventions. It represents the certificate +// fingerprint using the MD5 digest of DER-encoded version of certificate offered +// by the client. For consistency with other hash values, this value should be +// formatted as an uppercase hash. +func TLSClientHashMd5(val string) attribute.KeyValue { + return TLSClientHashMd5Key.String(val) +} + +// TLSClientHashSha1 returns an attribute KeyValue conforming to the +// "tls.client.hash.sha1" semantic conventions. It represents the certificate +// fingerprint using the SHA1 digest of DER-encoded version of certificate +// offered by the client. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSClientHashSha1(val string) attribute.KeyValue { + return TLSClientHashSha1Key.String(val) +} + +// TLSClientHashSha256 returns an attribute KeyValue conforming to the +// "tls.client.hash.sha256" semantic conventions. It represents the certificate +// fingerprint using the SHA256 digest of DER-encoded version of certificate +// offered by the client. 
For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSClientHashSha256(val string) attribute.KeyValue { + return TLSClientHashSha256Key.String(val) +} + +// TLSClientIssuer returns an attribute KeyValue conforming to the +// "tls.client.issuer" semantic conventions. It represents the distinguished name +// of [subject] of the issuer of the x.509 certificate presented by the client. +// +// [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 +func TLSClientIssuer(val string) attribute.KeyValue { + return TLSClientIssuerKey.String(val) +} + +// TLSClientJa3 returns an attribute KeyValue conforming to the "tls.client.ja3" +// semantic conventions. It represents a hash that identifies clients based on +// how they perform an SSL/TLS handshake. +func TLSClientJa3(val string) attribute.KeyValue { + return TLSClientJa3Key.String(val) +} + +// TLSClientNotAfter returns an attribute KeyValue conforming to the +// "tls.client.not_after" semantic conventions. It represents the date/Time +// indicating when client certificate is no longer considered valid. +func TLSClientNotAfter(val string) attribute.KeyValue { + return TLSClientNotAfterKey.String(val) +} + +// TLSClientNotBefore returns an attribute KeyValue conforming to the +// "tls.client.not_before" semantic conventions. It represents the date/Time +// indicating when client certificate is first considered valid. +func TLSClientNotBefore(val string) attribute.KeyValue { + return TLSClientNotBeforeKey.String(val) +} + +// TLSClientSubject returns an attribute KeyValue conforming to the +// "tls.client.subject" semantic conventions. It represents the distinguished +// name of subject of the x.509 certificate presented by the client. +func TLSClientSubject(val string) attribute.KeyValue { + return TLSClientSubjectKey.String(val) +} + +// TLSClientSupportedCiphers returns an attribute KeyValue conforming to the +// "tls.client.supported_ciphers" semantic conventions. It represents the array +// of ciphers offered by the client during the client hello. +func TLSClientSupportedCiphers(val ...string) attribute.KeyValue { + return TLSClientSupportedCiphersKey.StringSlice(val) +} + +// TLSCurve returns an attribute KeyValue conforming to the "tls.curve" semantic +// conventions. It represents the string indicating the curve used for the given +// cipher, when applicable. +func TLSCurve(val string) attribute.KeyValue { + return TLSCurveKey.String(val) +} + +// TLSEstablished returns an attribute KeyValue conforming to the +// "tls.established" semantic conventions. It represents the boolean flag +// indicating if the TLS negotiation was successful and transitioned to an +// encrypted tunnel. +func TLSEstablished(val bool) attribute.KeyValue { + return TLSEstablishedKey.Bool(val) +} + +// TLSNextProtocol returns an attribute KeyValue conforming to the +// "tls.next_protocol" semantic conventions. It represents the string indicating +// the protocol being tunneled. Per the values in the [IANA registry], this +// string should be lower case. +// +// [IANA registry]: https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids +func TLSNextProtocol(val string) attribute.KeyValue { + return TLSNextProtocolKey.String(val) +} + +// TLSProtocolVersion returns an attribute KeyValue conforming to the +// "tls.protocol.version" semantic conventions. 
It represents the numeric part of +// the version parsed from the original string of the negotiated +// [SSL/TLS protocol version]. +// +// [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values +func TLSProtocolVersion(val string) attribute.KeyValue { + return TLSProtocolVersionKey.String(val) +} + +// TLSResumed returns an attribute KeyValue conforming to the "tls.resumed" +// semantic conventions. It represents the boolean flag indicating if this TLS +// connection was resumed from an existing TLS negotiation. +func TLSResumed(val bool) attribute.KeyValue { + return TLSResumedKey.Bool(val) +} + +// TLSServerCertificate returns an attribute KeyValue conforming to the +// "tls.server.certificate" semantic conventions. It represents the PEM-encoded +// stand-alone certificate offered by the server. This is usually +// mutually-exclusive of `server.certificate_chain` since this value also exists +// in that list. +func TLSServerCertificate(val string) attribute.KeyValue { + return TLSServerCertificateKey.String(val) +} + +// TLSServerCertificateChain returns an attribute KeyValue conforming to the +// "tls.server.certificate_chain" semantic conventions. It represents the array +// of PEM-encoded certificates that make up the certificate chain offered by the +// server. This is usually mutually-exclusive of `server.certificate` since that +// value should be the first certificate in the chain. +func TLSServerCertificateChain(val ...string) attribute.KeyValue { + return TLSServerCertificateChainKey.StringSlice(val) +} + +// TLSServerHashMd5 returns an attribute KeyValue conforming to the +// "tls.server.hash.md5" semantic conventions. It represents the certificate +// fingerprint using the MD5 digest of DER-encoded version of certificate offered +// by the server. For consistency with other hash values, this value should be +// formatted as an uppercase hash. +func TLSServerHashMd5(val string) attribute.KeyValue { + return TLSServerHashMd5Key.String(val) +} + +// TLSServerHashSha1 returns an attribute KeyValue conforming to the +// "tls.server.hash.sha1" semantic conventions. It represents the certificate +// fingerprint using the SHA1 digest of DER-encoded version of certificate +// offered by the server. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSServerHashSha1(val string) attribute.KeyValue { + return TLSServerHashSha1Key.String(val) +} + +// TLSServerHashSha256 returns an attribute KeyValue conforming to the +// "tls.server.hash.sha256" semantic conventions. It represents the certificate +// fingerprint using the SHA256 digest of DER-encoded version of certificate +// offered by the server. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSServerHashSha256(val string) attribute.KeyValue { + return TLSServerHashSha256Key.String(val) +} + +// TLSServerIssuer returns an attribute KeyValue conforming to the +// "tls.server.issuer" semantic conventions. It represents the distinguished name +// of [subject] of the issuer of the x.509 certificate presented by the server. +// +// [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 +func TLSServerIssuer(val string) attribute.KeyValue { + return TLSServerIssuerKey.String(val) +} + +// TLSServerJa3s returns an attribute KeyValue conforming to the +// "tls.server.ja3s" semantic conventions.
It represents a hash that identifies +// servers based on how they perform an SSL/TLS handshake. +func TLSServerJa3s(val string) attribute.KeyValue { + return TLSServerJa3sKey.String(val) +} + +// TLSServerNotAfter returns an attribute KeyValue conforming to the +// "tls.server.not_after" semantic conventions. It represents the date/Time +// indicating when server certificate is no longer considered valid. +func TLSServerNotAfter(val string) attribute.KeyValue { + return TLSServerNotAfterKey.String(val) +} + +// TLSServerNotBefore returns an attribute KeyValue conforming to the +// "tls.server.not_before" semantic conventions. It represents the date/Time +// indicating when server certificate is first considered valid. +func TLSServerNotBefore(val string) attribute.KeyValue { + return TLSServerNotBeforeKey.String(val) +} + +// TLSServerSubject returns an attribute KeyValue conforming to the +// "tls.server.subject" semantic conventions. It represents the distinguished +// name of subject of the x.509 certificate presented by the server. +func TLSServerSubject(val string) attribute.KeyValue { + return TLSServerSubjectKey.String(val) +} + +// Enum values for tls.protocol.name +var ( + // ssl + // Stability: development + TLSProtocolNameSsl = TLSProtocolNameKey.String("ssl") + // tls + // Stability: development + TLSProtocolNameTLS = TLSProtocolNameKey.String("tls") +) + +// Namespace: url +const ( + // URLDomainKey is the attribute Key conforming to the "url.domain" semantic + // conventions. It represents the domain extracted from the `url.full`, such as + // "opentelemetry.io". + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "www.foo.bar", "opentelemetry.io", "3.12.167.2", + // "[1080:0:0:0:8:800:200C:417A]" + // Note: In some cases a URL may refer to an IP and/or port directly, without a + // domain name. In this case, the IP address would go to the domain field. If + // the URL contains a [literal IPv6 address] enclosed by `[` and `]`, the `[` + // and `]` characters should also be captured in the domain field. + // + // [literal IPv6 address]: https://www.rfc-editor.org/rfc/rfc2732#section-2 + URLDomainKey = attribute.Key("url.domain") + + // URLExtensionKey is the attribute Key conforming to the "url.extension" + // semantic conventions. It represents the file extension extracted from the + // `url.full`, excluding the leading dot. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "png", "gz" + // Note: The file extension is only set if it exists, as not every url has a + // file extension. When the file name has multiple extensions `example.tar.gz`, + // only the last one should be captured `gz`, not `tar.gz`. + URLExtensionKey = attribute.Key("url.extension") + + // URLFragmentKey is the attribute Key conforming to the "url.fragment" semantic + // conventions. It represents the [URI fragment] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SemConv" + // + // [URI fragment]: https://www.rfc-editor.org/rfc/rfc3986#section-3.5 + URLFragmentKey = attribute.Key("url.fragment") + + // URLFullKey is the attribute Key conforming to the "url.full" semantic + // conventions. It represents the absolute URL describing a network resource + // according to [RFC3986]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "https://www.foo.bar/search?q=OpenTelemetry#SemConv", "//localhost" + // Note: For network calls, URL usually has + // `scheme://host[:port][path][?query][#fragment]` format, where the fragment + // is not transmitted over HTTP, but if it is known, it SHOULD be included + // nevertheless. + // + // `url.full` MUST NOT contain credentials passed via URL in form of + // `https://username:password@www.example.com/`. + // In such case username and password SHOULD be redacted and attribute's value + // SHOULD be `https://REDACTED:REDACTED@www.example.com/`. + // + // `url.full` SHOULD capture the absolute URL when it is available (or can be + // reconstructed). + // + // Sensitive content provided in `url.full` SHOULD be scrubbed when + // instrumentations can identify it. + // + // + // Query string values for the following keys SHOULD be redacted by default and + // replaced by the + // value `REDACTED`: + // + // - [`AWSAccessKeyId`] + // - [`Signature`] + // - [`sig`] + // - [`X-Goog-Signature`] + // + // This list is subject to change over time. + // + // When a query string value is redacted, the query string key SHOULD still be + // preserved, e.g. + // `https://www.example.com/path?color=blue&sig=REDACTED`. + // + // [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986 + // [`AWSAccessKeyId`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`Signature`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`sig`]: https://learn.microsoft.com/azure/storage/common/storage-sas-overview#sas-token + // [`X-Goog-Signature`]: https://cloud.google.com/storage/docs/access-control/signed-urls + URLFullKey = attribute.Key("url.full") + + // URLOriginalKey is the attribute Key conforming to the "url.original" semantic + // conventions. It represents the unmodified original URL as seen in the event + // source. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://www.foo.bar/search?q=OpenTelemetry#SemConv", + // "search?q=OpenTelemetry" + // Note: In network monitoring, the observed URL may be a full URL, whereas in + // access logs, the URL is often just represented as a path. This field is meant + // to represent the URL as it was observed, complete or not. + // `url.original` might contain credentials passed via URL in form of + // `https://username:password@www.example.com/`. In such case password and + // username SHOULD NOT be redacted and attribute's value SHOULD remain the same. + URLOriginalKey = attribute.Key("url.original") + + // URLPathKey is the attribute Key conforming to the "url.path" semantic + // conventions. It represents the [URI path] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "/search" + // Note: Sensitive content provided in `url.path` SHOULD be scrubbed when + // instrumentations can identify it. + // + // [URI path]: https://www.rfc-editor.org/rfc/rfc3986#section-3.3 + URLPathKey = attribute.Key("url.path") + + // URLPortKey is the attribute Key conforming to the "url.port" semantic + // conventions. It represents the port extracted from the `url.full`. 
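+
+// A minimal usage sketch for url.full, assuming this package is imported as
+// semconv and an active trace.Span named span; per the note above, credentials
+// and signed query values are redacted before the attribute is set:
+//
+//	span.SetAttributes(
+//		semconv.URLFull("https://REDACTED:REDACTED@www.example.com/path?color=blue&sig=REDACTED"),
+//	)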
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 443 + URLPortKey = attribute.Key("url.port") + + // URLQueryKey is the attribute Key conforming to the "url.query" semantic + // conventions. It represents the [URI query] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "q=OpenTelemetry" + // Note: Sensitive content provided in `url.query` SHOULD be scrubbed when + // instrumentations can identify it. + // + // + // Query string values for the following keys SHOULD be redacted by default and + // replaced by the value `REDACTED`: + // + // - [`AWSAccessKeyId`] + // - [`Signature`] + // - [`sig`] + // - [`X-Goog-Signature`] + // + // This list is subject to change over time. + // + // When a query string value is redacted, the query string key SHOULD still be + // preserved, e.g. + // `q=OpenTelemetry&sig=REDACTED`. + // + // [URI query]: https://www.rfc-editor.org/rfc/rfc3986#section-3.4 + // [`AWSAccessKeyId`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`Signature`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`sig`]: https://learn.microsoft.com/azure/storage/common/storage-sas-overview#sas-token + // [`X-Goog-Signature`]: https://cloud.google.com/storage/docs/access-control/signed-urls + URLQueryKey = attribute.Key("url.query") + + // URLRegisteredDomainKey is the attribute Key conforming to the + // "url.registered_domain" semantic conventions. It represents the highest + // registered url domain, stripped of the subdomain. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "example.com", "foo.co.uk" + // Note: This value can be determined precisely with the [public suffix list]. + // For example, the registered domain for `foo.example.com` is `example.com`. + // Trying to approximate this by simply taking the last two labels will not work + // well for TLDs such as `co.uk`. + // + // [public suffix list]: https://publicsuffix.org/ + URLRegisteredDomainKey = attribute.Key("url.registered_domain") + + // URLSchemeKey is the attribute Key conforming to the "url.scheme" semantic + // conventions. It represents the [URI scheme] component identifying the used + // protocol. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "https", "ftp", "telnet" + // + // [URI scheme]: https://www.rfc-editor.org/rfc/rfc3986#section-3.1 + URLSchemeKey = attribute.Key("url.scheme") + + // URLSubdomainKey is the attribute Key conforming to the "url.subdomain" + // semantic conventions. It represents the subdomain portion of a fully + // qualified domain name, which includes all of the names except the host name + // under the registered_domain. In a partially qualified domain, or if the + // qualification level of the full name cannot be determined, subdomain contains + // all of the names below the registered domain. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "east", "sub2.sub1" + // Note: The subdomain portion of `www.east.mydomain.co.uk` is `east`. If the + // domain has multiple levels of subdomain, such as `sub2.sub1.example.com`, the + // subdomain field should contain `sub2.sub1`, with no trailing period.
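+
+// A minimal usage sketch for the URL domain attributes above, assuming this
+// package is imported as semconv and an active trace.Span named span; the
+// decomposition of www.east.mydomain.co.uk follows the notes on the individual
+// attributes:
+//
+//	span.SetAttributes(
+//		semconv.URLDomain("www.east.mydomain.co.uk"),
+//		semconv.URLRegisteredDomain("mydomain.co.uk"),
+//		semconv.URLSubdomain("east"),
+//	)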
+ URLSubdomainKey = attribute.Key("url.subdomain") + + // URLTemplateKey is the attribute Key conforming to the "url.template" semantic + // conventions. It represents the low-cardinality template of an + // [absolute path reference]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/users/{id}", "/users/:id", "/users?id={id}" + // + // [absolute path reference]: https://www.rfc-editor.org/rfc/rfc3986#section-4.2 + URLTemplateKey = attribute.Key("url.template") + + // URLTopLevelDomainKey is the attribute Key conforming to the + // "url.top_level_domain" semantic conventions. It represents the effective top + // level domain (eTLD), also known as the domain suffix, which is the last part + // of the domain name. For example, the top level domain for example.com is + // `com`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "com", "co.uk" + // Note: This value can be determined precisely with the [public suffix list]. + // + // [public suffix list]: https://publicsuffix.org/ + URLTopLevelDomainKey = attribute.Key("url.top_level_domain") +) + +// URLDomain returns an attribute KeyValue conforming to the "url.domain" +// semantic conventions. It represents the domain extracted from the `url.full`, +// such as "opentelemetry.io". +func URLDomain(val string) attribute.KeyValue { + return URLDomainKey.String(val) +} + +// URLExtension returns an attribute KeyValue conforming to the "url.extension" +// semantic conventions. It represents the file extension extracted from the +// `url.full`, excluding the leading dot. +func URLExtension(val string) attribute.KeyValue { + return URLExtensionKey.String(val) +} + +// URLFragment returns an attribute KeyValue conforming to the "url.fragment" +// semantic conventions. It represents the [URI fragment] component. +// +// [URI fragment]: https://www.rfc-editor.org/rfc/rfc3986#section-3.5 +func URLFragment(val string) attribute.KeyValue { + return URLFragmentKey.String(val) +} + +// URLFull returns an attribute KeyValue conforming to the "url.full" semantic +// conventions. It represents the absolute URL describing a network resource +// according to [RFC3986]. +// +// [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986 +func URLFull(val string) attribute.KeyValue { + return URLFullKey.String(val) +} + +// URLOriginal returns an attribute KeyValue conforming to the "url.original" +// semantic conventions. It represents the unmodified original URL as seen in the +// event source. +func URLOriginal(val string) attribute.KeyValue { + return URLOriginalKey.String(val) +} + +// URLPath returns an attribute KeyValue conforming to the "url.path" semantic +// conventions. It represents the [URI path] component. +// +// [URI path]: https://www.rfc-editor.org/rfc/rfc3986#section-3.3 +func URLPath(val string) attribute.KeyValue { + return URLPathKey.String(val) +} + +// URLPort returns an attribute KeyValue conforming to the "url.port" semantic +// conventions. It represents the port extracted from the `url.full`. +func URLPort(val int) attribute.KeyValue { + return URLPortKey.Int(val) +} + +// URLQuery returns an attribute KeyValue conforming to the "url.query" semantic +// conventions. It represents the [URI query] component.
+// +// [URI query]: https://www.rfc-editor.org/rfc/rfc3986#section-3.4 +func URLQuery(val string) attribute.KeyValue { + return URLQueryKey.String(val) +} + +// URLRegisteredDomain returns an attribute KeyValue conforming to the +// "url.registered_domain" semantic conventions. It represents the highest +// registered url domain, stripped of the subdomain. +func URLRegisteredDomain(val string) attribute.KeyValue { + return URLRegisteredDomainKey.String(val) +} + +// URLScheme returns an attribute KeyValue conforming to the "url.scheme" +// semantic conventions. It represents the [URI scheme] component identifying the +// used protocol. +// +// [URI scheme]: https://www.rfc-editor.org/rfc/rfc3986#section-3.1 +func URLScheme(val string) attribute.KeyValue { + return URLSchemeKey.String(val) +} + +// URLSubdomain returns an attribute KeyValue conforming to the "url.subdomain" +// semantic conventions. It represents the subdomain portion of a fully qualified +// domain name, which includes all of the names except the host name under the +// registered_domain. In a partially qualified domain, or if the qualification +// level of the full name cannot be determined, subdomain contains all of the +// names below the registered domain. +func URLSubdomain(val string) attribute.KeyValue { + return URLSubdomainKey.String(val) +} + +// URLTemplate returns an attribute KeyValue conforming to the "url.template" +// semantic conventions. It represents the low-cardinality template of an +// [absolute path reference]. +// +// [absolute path reference]: https://www.rfc-editor.org/rfc/rfc3986#section-4.2 +func URLTemplate(val string) attribute.KeyValue { + return URLTemplateKey.String(val) +} + +// URLTopLevelDomain returns an attribute KeyValue conforming to the +// "url.top_level_domain" semantic conventions. It represents the effective top +// level domain (eTLD), also known as the domain suffix, which is the last part +// of the domain name. For example, the top level domain for example.com is +// `com`. +func URLTopLevelDomain(val string) attribute.KeyValue { + return URLTopLevelDomainKey.String(val) +} + +// Namespace: user +const ( + // UserEmailKey is the attribute Key conforming to the "user.email" semantic + // conventions. It represents the user email address. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a.einstein@example.com" + UserEmailKey = attribute.Key("user.email") + + // UserFullNameKey is the attribute Key conforming to the "user.full_name" + // semantic conventions. It represents the user's full name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Albert Einstein" + UserFullNameKey = attribute.Key("user.full_name") + + // UserHashKey is the attribute Key conforming to the "user.hash" semantic + // conventions. It represents the unique user hash to correlate information for + // a user in anonymized form. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "364fc68eaf4c8acec74a4e52d7d1feaa" + // Note: Useful if `user.id` or `user.name` contain confidential information and + // cannot be used. + UserHashKey = attribute.Key("user.hash") + + // UserIDKey is the attribute Key conforming to the "user.id" semantic + // conventions. It represents the unique identifier of the user.
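+
+// A minimal usage sketch for the user attributes above, assuming this package
+// is imported as semconv and an active trace.Span named span, using the helper
+// functions defined below (values mirror the Examples):
+//
+//	span.SetAttributes(
+//		semconv.UserEmail("a.einstein@example.com"),
+//		semconv.UserFullName("Albert Einstein"),
+//	)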
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "S-1-5-21-202424912787-2692429404-2351956786-1000" + UserIDKey = attribute.Key("user.id") + + // UserNameKey is the attribute Key conforming to the "user.name" semantic + // conventions. It represents the short name or login/username of the user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a.einstein" + UserNameKey = attribute.Key("user.name") + + // UserRolesKey is the attribute Key conforming to the "user.roles" semantic + // conventions. It represents the array of user roles at the time of the event. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "admin", "reporting_user" + UserRolesKey = attribute.Key("user.roles") +) + +// UserEmail returns an attribute KeyValue conforming to the "user.email" +// semantic conventions. It represents the user email address. +func UserEmail(val string) attribute.KeyValue { + return UserEmailKey.String(val) +} + +// UserFullName returns an attribute KeyValue conforming to the "user.full_name" +// semantic conventions. It represents the user's full name. +func UserFullName(val string) attribute.KeyValue { + return UserFullNameKey.String(val) +} + +// UserHash returns an attribute KeyValue conforming to the "user.hash" semantic +// conventions. It represents the unique user hash to correlate information for a +// user in anonymized form. +func UserHash(val string) attribute.KeyValue { + return UserHashKey.String(val) +} + +// UserID returns an attribute KeyValue conforming to the "user.id" semantic +// conventions. It represents the unique identifier of the user. +func UserID(val string) attribute.KeyValue { + return UserIDKey.String(val) +} + +// UserName returns an attribute KeyValue conforming to the "user.name" semantic +// conventions. It represents the short name or login/username of the user. +func UserName(val string) attribute.KeyValue { + return UserNameKey.String(val) +} + +// UserRoles returns an attribute KeyValue conforming to the "user.roles" +// semantic conventions. It represents the array of user roles at the time of the +// event. +func UserRoles(val ...string) attribute.KeyValue { + return UserRolesKey.StringSlice(val) +} + +// Namespace: user_agent +const ( + // UserAgentNameKey is the attribute Key conforming to the "user_agent.name" + // semantic conventions. It represents the name of the user-agent extracted from + // original. Usually refers to the browser's name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Safari", "YourApp" + // Note: [Example] of extracting browser's name from original string. In the + // case of using a user-agent for non-browser products, such as microservices + // with multiple names/versions inside the `user_agent.original`, the most + // significant name SHOULD be selected. In such a scenario it should align with + // `user_agent.version` + // + // [Example]: https://www.whatsmyua.info + UserAgentNameKey = attribute.Key("user_agent.name") + + // UserAgentOriginalKey is the attribute Key conforming to the + // "user_agent.original" semantic conventions. It represents the value of the + // [HTTP User-Agent] header sent by the client. 
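+	// A capture sketch (req is a hypothetical *net/http.Request; its
+	// UserAgent method returns the request's User-Agent header):
+	//
+	//	span.SetAttributes(UserAgentOriginal(req.UserAgent()))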
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: "CERN-LineMode/2.15 libwww/2.17b3", "Mozilla/5.0 (iPhone; CPU
+	// iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko)
+	// Version/14.1.2 Mobile/15E148 Safari/604.1", "YourApp/1.0.0
+	// grpc-java-okhttp/1.27.2"
+	//
+	// [HTTP User-Agent]: https://www.rfc-editor.org/rfc/rfc9110.html#field.user-agent
+	UserAgentOriginalKey = attribute.Key("user_agent.original")
+
+	// UserAgentOSNameKey is the attribute Key conforming to the
+	// "user_agent.os.name" semantic conventions. It represents the human readable
+	// operating system name.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "iOS", "Android", "Ubuntu"
+	// Note: For mapping user agent strings to OS names, libraries such as
+	// [ua-parser] can be utilized.
+	//
+	// [ua-parser]: https://github.com/ua-parser
+	UserAgentOSNameKey = attribute.Key("user_agent.os.name")
+
+	// UserAgentOSVersionKey is the attribute Key conforming to the
+	// "user_agent.os.version" semantic conventions. It represents the version
+	// string of the operating system as defined in [Version Attributes].
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "14.2.1", "18.04.1"
+	// Note: For mapping user agent strings to OS versions, libraries such as
+	// [ua-parser] can be utilized.
+	//
+	// [Version Attributes]: /docs/resource/README.md#version-attributes
+	// [ua-parser]: https://github.com/ua-parser
+	UserAgentOSVersionKey = attribute.Key("user_agent.os.version")
+
+	// UserAgentSyntheticTypeKey is the attribute Key conforming to the
+	// "user_agent.synthetic.type" semantic conventions. It represents the
+	// category of synthetic traffic, such as tests or bots.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Note: This attribute MAY be derived from the contents of the
+	// `user_agent.original` attribute. Components that populate the attribute are
+	// responsible for determining what they consider to be synthetic bot or test
+	// traffic. This attribute can either be set for self-identification purposes,
+	// or on telemetry detected to be generated as a result of a synthetic request.
+	// This attribute is useful for distinguishing between genuine client traffic
+	// and synthetic traffic generated by bots or tests.
+	UserAgentSyntheticTypeKey = attribute.Key("user_agent.synthetic.type")
+
+	// UserAgentVersionKey is the attribute Key conforming to the
+	// "user_agent.version" semantic conventions. It represents the version of the
+	// user-agent extracted from original. Usually refers to the browser's version.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "14.1.2", "1.0.0"
+	// Note: [Example] of extracting browser's version from original string. In the
+	// case of using a user-agent for non-browser products, such as microservices
+	// with multiple names/versions inside the `user_agent.original`, the most
+	// significant version SHOULD be selected. In such a scenario it should align
+	// with `user_agent.name`.
+	//
+	// [Example]: https://www.whatsmyua.info
+	UserAgentVersionKey = attribute.Key("user_agent.version")
+)
+
+// UserAgentName returns an attribute KeyValue conforming to the
+// "user_agent.name" semantic conventions. It represents the name of the
+// user-agent extracted from original. Usually refers to the browser's name.
+func UserAgentName(val string) attribute.KeyValue {
+	return UserAgentNameKey.String(val)
+}
+
+// UserAgentOriginal returns an attribute KeyValue conforming to the
+// "user_agent.original" semantic conventions. It represents the value of the
+// [HTTP User-Agent] header sent by the client.
+//
+// [HTTP User-Agent]: https://www.rfc-editor.org/rfc/rfc9110.html#field.user-agent
+func UserAgentOriginal(val string) attribute.KeyValue {
+	return UserAgentOriginalKey.String(val)
+}
+
+// UserAgentOSName returns an attribute KeyValue conforming to the
+// "user_agent.os.name" semantic conventions. It represents the human readable
+// operating system name.
+func UserAgentOSName(val string) attribute.KeyValue {
+	return UserAgentOSNameKey.String(val)
+}
+
+// UserAgentOSVersion returns an attribute KeyValue conforming to the
+// "user_agent.os.version" semantic conventions. It represents the version string
+// of the operating system as defined in [Version Attributes].
+//
+// [Version Attributes]: /docs/resource/README.md#version-attributes
+func UserAgentOSVersion(val string) attribute.KeyValue {
+	return UserAgentOSVersionKey.String(val)
+}
+
+// UserAgentVersion returns an attribute KeyValue conforming to the
+// "user_agent.version" semantic conventions. It represents the version of the
+// user-agent extracted from original. Usually refers to the browser's version.
+func UserAgentVersion(val string) attribute.KeyValue {
+	return UserAgentVersionKey.String(val)
+}
+
+// Enum values for user_agent.synthetic.type
+var (
+	// Bot source.
+	// Stability: development
+	UserAgentSyntheticTypeBot = UserAgentSyntheticTypeKey.String("bot")
+	// Synthetic test source.
+	// Stability: development
+	UserAgentSyntheticTypeTest = UserAgentSyntheticTypeKey.String("test")
+)
+
+// Namespace: vcs
+const (
+	// VCSChangeIDKey is the attribute Key conforming to the "vcs.change.id"
+	// semantic conventions. It represents the ID of the change (pull request/merge
+	// request/changelist) if applicable. This is usually a unique (within
+	// repository) identifier generated by the VCS system.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "123"
+	VCSChangeIDKey = attribute.Key("vcs.change.id")
+
+	// VCSChangeStateKey is the attribute Key conforming to the "vcs.change.state"
+	// semantic conventions. It represents the state of the change (pull
+	// request/merge request/changelist).
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "open", "closed", "merged"
+	VCSChangeStateKey = attribute.Key("vcs.change.state")
+
+	// VCSChangeTitleKey is the attribute Key conforming to the "vcs.change.title"
+	// semantic conventions. It represents the human readable title of the change
+	// (pull request/merge request/changelist). This title is often a brief summary
+	// of the change and may get merged into a ref as the commit summary.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Fixes broken thing", "feat: add my new feature", "[chore] update
+	// dependency"
+	VCSChangeTitleKey = attribute.Key("vcs.change.title")
+
+	// VCSLineChangeTypeKey is the attribute Key conforming to the
+	// "vcs.line_change.type" semantic conventions. It represents the type of line
+	// change being measured on a branch or change.
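+	// For example, a change that adds 10 lines and removes 2 could be
+	// reported as two measurements, one tagged "added" and one tagged
+	// "removed" (illustrative).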
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "added", "removed"
+	VCSLineChangeTypeKey = attribute.Key("vcs.line_change.type")
+
+	// VCSOwnerNameKey is the attribute Key conforming to the "vcs.owner.name"
+	// semantic conventions. It represents the group owner within the version
+	// control system.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "my-org", "myteam", "business-unit"
+	VCSOwnerNameKey = attribute.Key("vcs.owner.name")
+
+	// VCSProviderNameKey is the attribute Key conforming to the "vcs.provider.name"
+	// semantic conventions. It represents the name of the version control system
+	// provider.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "github", "gitlab", "gitea", "bitbucket"
+	VCSProviderNameKey = attribute.Key("vcs.provider.name")
+
+	// VCSRefBaseNameKey is the attribute Key conforming to the "vcs.ref.base.name"
+	// semantic conventions. It represents the name of the [reference] such as
+	// **branch** or **tag** in the repository.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "my-feature-branch", "tag-1-test"
+	// Note: `base` refers to the starting point of a change. For example, `main`
+	// would be the base reference of type branch if you've created a new
+	// reference of type branch from it and created new commits.
+	//
+	// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+	VCSRefBaseNameKey = attribute.Key("vcs.ref.base.name")
+
+	// VCSRefBaseRevisionKey is the attribute Key conforming to the
+	// "vcs.ref.base.revision" semantic conventions. It represents the revision,
+	// literally [revised version]. The revision most often refers to a commit
+	// object in Git, or a revision number in SVN.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "9d59409acf479dfa0df1aa568182e43e43df8bbe28d60fcf2bc52e30068802cc",
+	// "main", "123", "HEAD"
+	// Note: `base` refers to the starting point of a change. For example, `main`
+	// would be the base reference of type branch if you've created a new
+	// reference of type branch from it and created new commits. The
+	// revision can be a full [hash value (see glossary)] of the recorded change
+	// to a ref within a repository pointing to a [commit] object. It does
+	// not necessarily have to be a hash; it can simply define a
+	// [revision number] which is an integer that is monotonically increasing.
+	// In cases where it is identical to the `ref.base.name`, it SHOULD still be
+	// included. It is up to the implementer to decide which value to set as the
+	// revision based on the VCS system and situational context.
+	//
+	// [revised version]: https://www.merriam-webster.com/dictionary/revision
+	// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf
+	// [commit]: https://git-scm.com/docs/git-commit
+	// [revision number]: https://svnbook.red-bean.com/en/1.7/svn.tour.revs.specifiers.html
+	VCSRefBaseRevisionKey = attribute.Key("vcs.ref.base.revision")
+
+	// VCSRefBaseTypeKey is the attribute Key conforming to the "vcs.ref.base.type"
+	// semantic conventions. It represents the type of the [reference] in the
+	// repository.
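+	// For example, for a pull request from a feature branch into main, the
+	// base ref would typically be reported with name "main" and type "branch"
+	// (illustrative).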
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "branch", "tag"
+	// Note: `base` refers to the starting point of a change. For example, `main`
+	// would be the base reference of type branch if you've created a new
+	// reference of type branch from it and created new commits.
+	//
+	// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+	VCSRefBaseTypeKey = attribute.Key("vcs.ref.base.type")
+
+	// VCSRefHeadNameKey is the attribute Key conforming to the "vcs.ref.head.name"
+	// semantic conventions. It represents the name of the [reference] such as
+	// **branch** or **tag** in the repository.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "my-feature-branch", "tag-1-test"
+	// Note: `head` refers to where you are right now; the current reference at a
+	// given time.
+	//
+	// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+	VCSRefHeadNameKey = attribute.Key("vcs.ref.head.name")
+
+	// VCSRefHeadRevisionKey is the attribute Key conforming to the
+	// "vcs.ref.head.revision" semantic conventions. It represents the revision,
+	// literally [revised version]. The revision most often refers to a commit
+	// object in Git, or a revision number in SVN.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "9d59409acf479dfa0df1aa568182e43e43df8bbe28d60fcf2bc52e30068802cc",
+	// "main", "123", "HEAD"
+	// Note: `head` refers to where you are right now; the current reference at a
+	// given time. The revision can be a full [hash value (see glossary)] of the
+	// recorded change to a ref within a repository pointing to a [commit]
+	// object. It does not necessarily have to be a hash; it can simply define a
+	// [revision number] which is an integer that is monotonically increasing.
+	// In cases where it is identical to the `ref.head.name`, it SHOULD still be
+	// included. It is up to the implementer to decide which value to set as the
+	// revision based on the VCS system and situational context.
+	//
+	// [revised version]: https://www.merriam-webster.com/dictionary/revision
+	// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf
+	// [commit]: https://git-scm.com/docs/git-commit
+	// [revision number]: https://svnbook.red-bean.com/en/1.7/svn.tour.revs.specifiers.html
+	VCSRefHeadRevisionKey = attribute.Key("vcs.ref.head.revision")
+
+	// VCSRefHeadTypeKey is the attribute Key conforming to the "vcs.ref.head.type"
+	// semantic conventions. It represents the type of the [reference] in the
+	// repository.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "branch", "tag"
+	// Note: `head` refers to where you are right now; the current reference at a
+	// given time.
+	//
+	// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+	VCSRefHeadTypeKey = attribute.Key("vcs.ref.head.type")
+
+	// VCSRefTypeKey is the attribute Key conforming to the "vcs.ref.type" semantic
+	// conventions. It represents the type of the [reference] in the repository.
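+	// A usage sketch combining head-ref attributes (values are hypothetical;
+	// VCSRefHeadTypeBranch is one of the enum values defined later in this
+	// file):
+	//
+	//	span.SetAttributes(VCSRefHeadName("my-feature-branch"), VCSRefHeadTypeBranch)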
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "branch", "tag"
+	//
+	// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+	VCSRefTypeKey = attribute.Key("vcs.ref.type")
+
+	// VCSRepositoryNameKey is the attribute Key conforming to the
+	// "vcs.repository.name" semantic conventions. It represents the human readable
+	// name of the repository. It SHOULD NOT include any additional identifier like
+	// Group/SubGroup in GitLab or organization in GitHub.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "semantic-conventions", "my-cool-repo"
+	// Note: Due to it only being the name, it can clash with forks of the same
+	// repository if collecting telemetry across multiple orgs or groups in
+	// the same backends.
+	VCSRepositoryNameKey = attribute.Key("vcs.repository.name")
+
+	// VCSRepositoryURLFullKey is the attribute Key conforming to the
+	// "vcs.repository.url.full" semantic conventions. It represents the
+	// [canonical URL] of the repository providing the complete HTTP(S) address in
+	// order to locate and identify the repository through a browser.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples:
+	// "https://github.com/opentelemetry/open-telemetry-collector-contrib",
+	// "https://gitlab.com/my-org/my-project/my-projects-project/repo"
+	// Note: In Git Version Control Systems, the canonical URL SHOULD NOT include
+	// the `.git` extension.
+	//
+	// [canonical URL]: https://support.google.com/webmasters/answer/10347851?hl=en#:~:text=A%20canonical%20URL%20is%20the,Google%20chooses%20one%20as%20canonical.
+	VCSRepositoryURLFullKey = attribute.Key("vcs.repository.url.full")
+
+	// VCSRevisionDeltaDirectionKey is the attribute Key conforming to the
+	// "vcs.revision_delta.direction" semantic conventions. It represents the type
+	// of revision comparison.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "ahead", "behind"
+	VCSRevisionDeltaDirectionKey = attribute.Key("vcs.revision_delta.direction")
+)
+
+// VCSChangeID returns an attribute KeyValue conforming to the "vcs.change.id"
+// semantic conventions. It represents the ID of the change (pull request/merge
+// request/changelist) if applicable. This is usually a unique (within
+// repository) identifier generated by the VCS system.
+func VCSChangeID(val string) attribute.KeyValue {
+	return VCSChangeIDKey.String(val)
+}
+
+// VCSChangeTitle returns an attribute KeyValue conforming to the
+// "vcs.change.title" semantic conventions. It represents the human readable
+// title of the change (pull request/merge request/changelist). This title is
+// often a brief summary of the change and may get merged into a ref as the
+// commit summary.
+func VCSChangeTitle(val string) attribute.KeyValue {
+	return VCSChangeTitleKey.String(val)
+}
+
+// VCSOwnerName returns an attribute KeyValue conforming to the "vcs.owner.name"
+// semantic conventions. It represents the group owner within the version control
+// system.
+func VCSOwnerName(val string) attribute.KeyValue {
+	return VCSOwnerNameKey.String(val)
+}
+
+// VCSRefBaseName returns an attribute KeyValue conforming to the
+// "vcs.ref.base.name" semantic conventions. It represents the name of the
+// [reference] such as **branch** or **tag** in the repository.
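+// For example, VCSRefBaseName("main") paired with VCSRefBaseTypeBranch
+// (defined below) describes a change whose starting point is the main branch
+// (illustrative).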
+//
+// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+func VCSRefBaseName(val string) attribute.KeyValue {
+	return VCSRefBaseNameKey.String(val)
+}
+
+// VCSRefBaseRevision returns an attribute KeyValue conforming to the
+// "vcs.ref.base.revision" semantic conventions. It represents the revision,
+// literally [revised version]. The revision most often refers to a commit object
+// in Git, or a revision number in SVN.
+//
+// [revised version]: https://www.merriam-webster.com/dictionary/revision
+func VCSRefBaseRevision(val string) attribute.KeyValue {
+	return VCSRefBaseRevisionKey.String(val)
+}
+
+// VCSRefHeadName returns an attribute KeyValue conforming to the
+// "vcs.ref.head.name" semantic conventions. It represents the name of the
+// [reference] such as **branch** or **tag** in the repository.
+//
+// [reference]: https://git-scm.com/docs/gitglossary#def_ref
+func VCSRefHeadName(val string) attribute.KeyValue {
+	return VCSRefHeadNameKey.String(val)
+}
+
+// VCSRefHeadRevision returns an attribute KeyValue conforming to the
+// "vcs.ref.head.revision" semantic conventions. It represents the revision,
+// literally [revised version]. The revision most often refers to a commit object
+// in Git, or a revision number in SVN.
+//
+// [revised version]: https://www.merriam-webster.com/dictionary/revision
+func VCSRefHeadRevision(val string) attribute.KeyValue {
+	return VCSRefHeadRevisionKey.String(val)
+}
+
+// VCSRepositoryName returns an attribute KeyValue conforming to the
+// "vcs.repository.name" semantic conventions. It represents the human readable
+// name of the repository. It SHOULD NOT include any additional identifier like
+// Group/SubGroup in GitLab or organization in GitHub.
+func VCSRepositoryName(val string) attribute.KeyValue {
+	return VCSRepositoryNameKey.String(val)
+}
+
+// VCSRepositoryURLFull returns an attribute KeyValue conforming to the
+// "vcs.repository.url.full" semantic conventions. It represents the
+// [canonical URL] of the repository providing the complete HTTP(S) address in
+// order to locate and identify the repository through a browser.
+//
+// [canonical URL]: https://support.google.com/webmasters/answer/10347851?hl=en#:~:text=A%20canonical%20URL%20is%20the,Google%20chooses%20one%20as%20canonical.
+func VCSRepositoryURLFull(val string) attribute.KeyValue {
+	return VCSRepositoryURLFullKey.String(val)
+}
+
+// Enum values for vcs.change.state
+var (
+	// Open means the change is currently active and under review. It hasn't been
+	// merged into the target branch yet, and it's still possible to make changes or
+	// add comments.
+	// Stability: development
+	VCSChangeStateOpen = VCSChangeStateKey.String("open")
+	// WIP (work-in-progress, draft) means the change is still in progress and not
+	// yet ready for a full review. It might still undergo significant changes.
+	// Stability: development
+	VCSChangeStateWip = VCSChangeStateKey.String("wip")
+	// Closed means the merge request has been closed without merging. This can
+	// happen for various reasons, such as the changes being deemed unnecessary, the
+	// issue being resolved in another way, or the author deciding to withdraw the
+	// request.
+	// Stability: development
+	VCSChangeStateClosed = VCSChangeStateKey.String("closed")
+	// Merged indicates that the change has been successfully integrated into the
+	// target codebase.
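+	// For example, a merged GitHub pull request would be reported with state
+	// "merged" (illustrative).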
+ // Stability: development + VCSChangeStateMerged = VCSChangeStateKey.String("merged") +) + +// Enum values for vcs.line_change.type +var ( + // How many lines were added. + // Stability: development + VCSLineChangeTypeAdded = VCSLineChangeTypeKey.String("added") + // How many lines were removed. + // Stability: development + VCSLineChangeTypeRemoved = VCSLineChangeTypeKey.String("removed") +) + +// Enum values for vcs.provider.name +var ( + // [GitHub] + // Stability: development + // + // [GitHub]: https://github.com + VCSProviderNameGithub = VCSProviderNameKey.String("github") + // [GitLab] + // Stability: development + // + // [GitLab]: https://gitlab.com + VCSProviderNameGitlab = VCSProviderNameKey.String("gitlab") + // Deprecated: Replaced by `gitea`. + VCSProviderNameGittea = VCSProviderNameKey.String("gittea") + // [Gitea] + // Stability: development + // + // [Gitea]: https://gitea.io + VCSProviderNameGitea = VCSProviderNameKey.String("gitea") + // [Bitbucket] + // Stability: development + // + // [Bitbucket]: https://bitbucket.org + VCSProviderNameBitbucket = VCSProviderNameKey.String("bitbucket") +) + +// Enum values for vcs.ref.base.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefBaseTypeBranch = VCSRefBaseTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefBaseTypeTag = VCSRefBaseTypeKey.String("tag") +) + +// Enum values for vcs.ref.head.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefHeadTypeBranch = VCSRefHeadTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefHeadTypeTag = VCSRefHeadTypeKey.String("tag") +) + +// Enum values for vcs.ref.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefTypeBranch = VCSRefTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefTypeTag = VCSRefTypeKey.String("tag") +) + +// Enum values for vcs.revision_delta.direction +var ( + // How many revisions the change is behind the target ref. + // Stability: development + VCSRevisionDeltaDirectionBehind = VCSRevisionDeltaDirectionKey.String("behind") + // How many revisions the change is ahead of the target ref. + // Stability: development + VCSRevisionDeltaDirectionAhead = VCSRevisionDeltaDirectionKey.String("ahead") +) + +// Namespace: webengine +const ( + // WebEngineDescriptionKey is the attribute Key conforming to the + // "webengine.description" semantic conventions. It represents the additional + // description of the web engine (e.g. detailed version and edition + // information). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "WildFly Full 21.0.0.Final (WildFly Core 13.0.1.Final) - + // 2.2.2.Final" + WebEngineDescriptionKey = attribute.Key("webengine.description") + + // WebEngineNameKey is the attribute Key conforming to the "webengine.name" + // semantic conventions. It represents the name of the web engine. 
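+	// A resource-level sketch (a hypothetical use of the
+	// go.opentelemetry.io/otel/sdk/resource package):
+	//
+	//	resource.NewWithAttributes(SchemaURL, WebEngineName("WildFly"))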
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "WildFly" + WebEngineNameKey = attribute.Key("webengine.name") + + // WebEngineVersionKey is the attribute Key conforming to the + // "webengine.version" semantic conventions. It represents the version of the + // web engine. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "21.0.0" + WebEngineVersionKey = attribute.Key("webengine.version") +) + +// WebEngineDescription returns an attribute KeyValue conforming to the +// "webengine.description" semantic conventions. It represents the additional +// description of the web engine (e.g. detailed version and edition information). +func WebEngineDescription(val string) attribute.KeyValue { + return WebEngineDescriptionKey.String(val) +} + +// WebEngineName returns an attribute KeyValue conforming to the "webengine.name" +// semantic conventions. It represents the name of the web engine. +func WebEngineName(val string) attribute.KeyValue { + return WebEngineNameKey.String(val) +} + +// WebEngineVersion returns an attribute KeyValue conforming to the +// "webengine.version" semantic conventions. It represents the version of the web +// engine. +func WebEngineVersion(val string) attribute.KeyValue { + return WebEngineVersionKey.String(val) +} \ No newline at end of file diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go new file mode 100644 index 00000000..2c5c7ebd --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// Package semconv implements OpenTelemetry semantic conventions. +// +// OpenTelemetry semantic conventions are agreed standardized naming +// patterns for OpenTelemetry things. This package represents the v1.34.0 +// version of the OpenTelemetry semantic conventions. +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go new file mode 100644 index 00000000..88a998f1 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +const ( + // ExceptionEventName is the name of the Span event representing an exception. + ExceptionEventName = "exception" +) diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go new file mode 100644 index 00000000..3c23d459 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +// SchemaURL is the schema URL that matches the version of the semantic conventions +// that this package defines. 
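+// For example, it can be attached to an SDK resource (a sketch; resource is
+// the go.opentelemetry.io/otel/sdk/resource package, ctx and error handling
+// are elided):
+//
+//	res, _ := resource.New(ctx, resource.WithSchemaURL(semconv.SchemaURL))
+//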
+// Semconv packages starting from v1.4.0 must declare a
+// non-empty schema URL in the form https://opentelemetry.io/schemas/<version>.
+const SchemaURL = "https://opentelemetry.io/schemas/1.34.0"
diff --git a/vendor/go.opentelemetry.io/otel/trace/auto.go b/vendor/go.opentelemetry.io/otel/trace/auto.go
index d90af8f6..f3aa3981 100644
--- a/vendor/go.opentelemetry.io/otel/trace/auto.go
+++ b/vendor/go.opentelemetry.io/otel/trace/auto.go
@@ -20,7 +20,7 @@ import (
 
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/codes"
-	semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
+	semconv "go.opentelemetry.io/otel/semconv/v1.34.0"
 	"go.opentelemetry.io/otel/trace/embedded"
 	"go.opentelemetry.io/otel/trace/internal/telemetry"
 )
diff --git a/vendor/go.opentelemetry.io/otel/version.go b/vendor/go.opentelemetry.io/otel/version.go
index ac3c0b15..7afe92b5 100644
--- a/vendor/go.opentelemetry.io/otel/version.go
+++ b/vendor/go.opentelemetry.io/otel/version.go
@@ -5,5 +5,5 @@ package otel // import "go.opentelemetry.io/otel"
 
 // Version is the current release version of OpenTelemetry in use.
 func Version() string {
-	return "1.36.0"
+	return "1.37.0"
 }
diff --git a/vendor/go.opentelemetry.io/otel/versions.yaml b/vendor/go.opentelemetry.io/otel/versions.yaml
index 79f82f3d..9d4742a1 100644
--- a/vendor/go.opentelemetry.io/otel/versions.yaml
+++ b/vendor/go.opentelemetry.io/otel/versions.yaml
@@ -3,13 +3,12 @@
 module-sets:
   stable-v1:
-    version: v1.36.0
+    version: v1.37.0
     modules:
       - go.opentelemetry.io/otel
       - go.opentelemetry.io/otel/bridge/opencensus
      - go.opentelemetry.io/otel/bridge/opencensus/test
       - go.opentelemetry.io/otel/bridge/opentracing
-      - go.opentelemetry.io/otel/bridge/opentracing/test
       - go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc
       - go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp
       - go.opentelemetry.io/otel/exporters/otlp/otlptrace
@@ -23,14 +22,16 @@ module-sets:
       - go.opentelemetry.io/otel/sdk/metric
       - go.opentelemetry.io/otel/trace
   experimental-metrics:
-    version: v0.58.0
+    version: v0.59.0
     modules:
       - go.opentelemetry.io/otel/exporters/prometheus
   experimental-logs:
-    version: v0.12.0
+    version: v0.13.0
     modules:
       - go.opentelemetry.io/otel/log
+      - go.opentelemetry.io/otel/log/logtest
       - go.opentelemetry.io/otel/sdk/log
+      - go.opentelemetry.io/otel/sdk/log/logtest
       - go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc
       - go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp
       - go.opentelemetry.io/otel/exporters/stdout/stdoutlog
@@ -40,6 +41,4 @@ module-sets:
       - go.opentelemetry.io/otel/schema
 excluded-modules:
   - go.opentelemetry.io/otel/internal/tools
-  - go.opentelemetry.io/otel/log/logtest
-  - go.opentelemetry.io/otel/sdk/log/logtest
   - go.opentelemetry.io/otel/trace/internal/telemetry/test
diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE
deleted file mode 100644
index 2a7cf70d..00000000
--- a/vendor/golang.org/x/mod/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright 2009 The Go Authors.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google LLC nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS deleted file mode 100644 index 73309904..00000000 --- a/vendor/golang.org/x/mod/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go deleted file mode 100644 index 628f8fd6..00000000 --- a/vendor/golang.org/x/mod/semver/semver.go +++ /dev/null @@ -1,407 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package semver implements comparison of semantic version strings. -// In this package, semantic version strings must begin with a leading "v", -// as in "v1.0.0". -// -// The general form of a semantic version string accepted by this package is -// -// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] -// -// where square brackets indicate optional parts of the syntax; -// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; -// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers -// using only alphanumeric characters and hyphens; and -// all-numeric PRERELEASE identifiers must not have leading zeros. 
-// -// This package follows Semantic Versioning 2.0.0 (see semver.org) -// with two exceptions. First, it requires the "v" prefix. Second, it recognizes -// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) -// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. -package semver - -import ( - "slices" - "strings" -) - -// parsed returns the parsed form of a semantic version string. -type parsed struct { - major string - minor string - patch string - short string - prerelease string - build string -} - -// IsValid reports whether v is a valid semantic version string. -func IsValid(v string) bool { - _, ok := parse(v) - return ok -} - -// Canonical returns the canonical formatting of the semantic version v. -// It fills in any missing .MINOR or .PATCH and discards build metadata. -// Two semantic versions compare equal only if their canonical formattings -// are identical strings. -// The canonical invalid semantic version is the empty string. -func Canonical(v string) string { - p, ok := parse(v) - if !ok { - return "" - } - if p.build != "" { - return v[:len(v)-len(p.build)] - } - if p.short != "" { - return v + p.short - } - return v -} - -// Major returns the major version prefix of the semantic version v. -// For example, Major("v2.1.0") == "v2". -// If v is an invalid semantic version string, Major returns the empty string. -func Major(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return v[:1+len(pv.major)] -} - -// MajorMinor returns the major.minor version prefix of the semantic version v. -// For example, MajorMinor("v2.1.0") == "v2.1". -// If v is an invalid semantic version string, MajorMinor returns the empty string. -func MajorMinor(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - i := 1 + len(pv.major) - if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { - return v[:j] - } - return v[:i] + "." + pv.minor -} - -// Prerelease returns the prerelease suffix of the semantic version v. -// For example, Prerelease("v2.1.0-pre+meta") == "-pre". -// If v is an invalid semantic version string, Prerelease returns the empty string. -func Prerelease(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return pv.prerelease -} - -// Build returns the build suffix of the semantic version v. -// For example, Build("v2.1.0+meta") == "+meta". -// If v is an invalid semantic version string, Build returns the empty string. -func Build(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return pv.build -} - -// Compare returns an integer comparing two versions according to -// semantic version precedence. -// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. -// -// An invalid semantic version string is considered less than a valid one. -// All invalid semantic version strings compare equal to each other. -func Compare(v, w string) int { - pv, ok1 := parse(v) - pw, ok2 := parse(w) - if !ok1 && !ok2 { - return 0 - } - if !ok1 { - return -1 - } - if !ok2 { - return +1 - } - if c := compareInt(pv.major, pw.major); c != 0 { - return c - } - if c := compareInt(pv.minor, pw.minor); c != 0 { - return c - } - if c := compareInt(pv.patch, pw.patch); c != 0 { - return c - } - return comparePrerelease(pv.prerelease, pw.prerelease) -} - -// Max canonicalizes its arguments and then returns the version string -// that compares greater. -// -// Deprecated: use [Compare] instead. In most cases, returning a canonicalized -// version is not expected or desired. 
-func Max(v, w string) string { - v = Canonical(v) - w = Canonical(w) - if Compare(v, w) > 0 { - return v - } - return w -} - -// ByVersion implements [sort.Interface] for sorting semantic version strings. -type ByVersion []string - -func (vs ByVersion) Len() int { return len(vs) } -func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } -func (vs ByVersion) Less(i, j int) bool { return compareVersion(vs[i], vs[j]) < 0 } - -// Sort sorts a list of semantic version strings using [Compare] and falls back -// to use [strings.Compare] if both versions are considered equal. -func Sort(list []string) { - slices.SortFunc(list, compareVersion) -} - -func compareVersion(a, b string) int { - cmp := Compare(a, b) - if cmp != 0 { - return cmp - } - return strings.Compare(a, b) -} - -func parse(v string) (p parsed, ok bool) { - if v == "" || v[0] != 'v' { - return - } - p.major, v, ok = parseInt(v[1:]) - if !ok { - return - } - if v == "" { - p.minor = "0" - p.patch = "0" - p.short = ".0.0" - return - } - if v[0] != '.' { - ok = false - return - } - p.minor, v, ok = parseInt(v[1:]) - if !ok { - return - } - if v == "" { - p.patch = "0" - p.short = ".0" - return - } - if v[0] != '.' { - ok = false - return - } - p.patch, v, ok = parseInt(v[1:]) - if !ok { - return - } - if len(v) > 0 && v[0] == '-' { - p.prerelease, v, ok = parsePrerelease(v) - if !ok { - return - } - } - if len(v) > 0 && v[0] == '+' { - p.build, v, ok = parseBuild(v) - if !ok { - return - } - } - if v != "" { - ok = false - return - } - ok = true - return -} - -func parseInt(v string) (t, rest string, ok bool) { - if v == "" { - return - } - if v[0] < '0' || '9' < v[0] { - return - } - i := 1 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - if v[0] == '0' && i != 1 { - return - } - return v[:i], v[i:], true -} - -func parsePrerelease(v string) (t, rest string, ok bool) { - // "A pre-release version MAY be denoted by appending a hyphen and - // a series of dot separated identifiers immediately following the patch version. - // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. - // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." - if v == "" || v[0] != '-' { - return - } - i := 1 - start := 1 - for i < len(v) && v[i] != '+' { - if !isIdentChar(v[i]) && v[i] != '.' { - return - } - if v[i] == '.' { - if start == i || isBadNum(v[start:i]) { - return - } - start = i + 1 - } - i++ - } - if start == i || isBadNum(v[start:i]) { - return - } - return v[:i], v[i:], true -} - -func parseBuild(v string) (t, rest string, ok bool) { - if v == "" || v[0] != '+' { - return - } - i := 1 - start := 1 - for i < len(v) { - if !isIdentChar(v[i]) && v[i] != '.' { - return - } - if v[i] == '.' 
{ - if start == i { - return - } - start = i + 1 - } - i++ - } - if start == i { - return - } - return v[:i], v[i:], true -} - -func isIdentChar(c byte) bool { - return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' -} - -func isBadNum(v string) bool { - i := 0 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - return i == len(v) && i > 1 && v[0] == '0' -} - -func isNum(v string) bool { - i := 0 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - return i == len(v) -} - -func compareInt(x, y string) int { - if x == y { - return 0 - } - if len(x) < len(y) { - return -1 - } - if len(x) > len(y) { - return +1 - } - if x < y { - return -1 - } else { - return +1 - } -} - -func comparePrerelease(x, y string) int { - // "When major, minor, and patch are equal, a pre-release version has - // lower precedence than a normal version. - // Example: 1.0.0-alpha < 1.0.0. - // Precedence for two pre-release versions with the same major, minor, - // and patch version MUST be determined by comparing each dot separated - // identifier from left to right until a difference is found as follows: - // identifiers consisting of only digits are compared numerically and - // identifiers with letters or hyphens are compared lexically in ASCII - // sort order. Numeric identifiers always have lower precedence than - // non-numeric identifiers. A larger set of pre-release fields has a - // higher precedence than a smaller set, if all of the preceding - // identifiers are equal. - // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < - // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." - if x == y { - return 0 - } - if x == "" { - return +1 - } - if y == "" { - return -1 - } - for x != "" && y != "" { - x = x[1:] // skip - or . - y = y[1:] // skip - or . - var dx, dy string - dx, x = nextIdent(x) - dy, y = nextIdent(y) - if dx != dy { - ix := isNum(dx) - iy := isNum(dy) - if ix != iy { - if ix { - return -1 - } else { - return +1 - } - } - if ix { - if len(dx) < len(dy) { - return -1 - } - if len(dx) > len(dy) { - return +1 - } - } - if dx < dy { - return -1 - } else { - return +1 - } - } - } - if x == "" { - return -1 - } else { - return +1 - } -} - -func nextIdent(x string) (dx, rest string) { - i := 0 - for i < len(x) && x[i] != '.' 
{ - i++ - } - return x[:i], x[i:] -} diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index d1c8b264..6ab02b6c 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -349,9 +349,6 @@ struct ltchars { #define _HIDIOCGRAWPHYS HIDIOCGRAWPHYS(_HIDIOCGRAWPHYS_LEN) #define _HIDIOCGRAWUNIQ HIDIOCGRAWUNIQ(_HIDIOCGRAWUNIQ_LEN) -// Renamed in v6.16, commit c6d732c38f93 ("net: ethtool: remove duplicate defines for family info") -#define ETHTOOL_FAMILY_NAME ETHTOOL_GENL_NAME -#define ETHTOOL_FAMILY_VERSION ETHTOOL_GENL_VERSION ' includes_NetBSD=' diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin.go b/vendor/golang.org/x/sys/unix/syscall_darwin.go index 7838ca5d..798f61ad 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin.go @@ -602,9 +602,14 @@ func Connectx(fd int, srcIf uint32, srcAddr, dstAddr Sockaddr, associd SaeAssocI return } +// sys connectx(fd int, endpoints *SaEndpoints, associd SaeAssocID, flags uint32, iov []Iovec, n *uintptr, connid *SaeConnID) (err error) const minIovec = 8 func Readv(fd int, iovs [][]byte) (n int, err error) { + if !darwinKernelVersionMin(11, 0, 0) { + return 0, ENOSYS + } + iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) n, err = readv(fd, iovecs) @@ -613,6 +618,9 @@ func Readv(fd int, iovs [][]byte) (n int, err error) { } func Preadv(fd int, iovs [][]byte, offset int64) (n int, err error) { + if !darwinKernelVersionMin(11, 0, 0) { + return 0, ENOSYS + } iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) n, err = preadv(fd, iovecs, offset) @@ -621,6 +629,10 @@ func Preadv(fd int, iovs [][]byte, offset int64) (n int, err error) { } func Writev(fd int, iovs [][]byte) (n int, err error) { + if !darwinKernelVersionMin(11, 0, 0) { + return 0, ENOSYS + } + iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) if raceenabled { @@ -632,6 +644,10 @@ func Writev(fd int, iovs [][]byte) (n int, err error) { } func Pwritev(fd int, iovs [][]byte, offset int64) (n int, err error) { + if !darwinKernelVersionMin(11, 0, 0) { + return 0, ENOSYS + } + iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) if raceenabled { @@ -691,7 +707,45 @@ func readvRacedetect(iovecs []Iovec, n int, err error) { } } -//sys connectx(fd int, endpoints *SaEndpoints, associd SaeAssocID, flags uint32, iov []Iovec, n *uintptr, connid *SaeConnID) (err error) +func darwinMajorMinPatch() (maj, min, patch int, err error) { + var un Utsname + err = Uname(&un) + if err != nil { + return + } + + var mmp [3]int + c := 0 +Loop: + for _, b := range un.Release[:] { + switch { + case b >= '0' && b <= '9': + mmp[c] = 10*mmp[c] + int(b-'0') + case b == '.': + c++ + if c > 2 { + return 0, 0, 0, ENOTSUP + } + case b == 0: + break Loop + default: + return 0, 0, 0, ENOTSUP + } + } + if c != 2 { + return 0, 0, 0, ENOTSUP + } + return mmp[0], mmp[1], mmp[2], nil +} + +func darwinKernelVersionMin(maj, min, patch int) bool { + actualMaj, actualMin, actualPatch, err := darwinMajorMinPatch() + if err != nil { + return false + } + return actualMaj > maj || actualMaj == maj && (actualMin > min || actualMin == min && actualPatch >= patch) +} + //sys sendfile(infd int, outfd int, offset int64, len *int64, hdtr unsafe.Pointer, flags int) (err error) //sys shmat(id int, addr uintptr, flag int) (ret uintptr, err error) diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go 
b/vendor/golang.org/x/sys/unix/zerrors_linux.go index b6db27d9..9e7a6c5a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -328,8 +328,6 @@ const ( AUDIT_KERNEL = 0x7d0 AUDIT_KERNEL_OTHER = 0x524 AUDIT_KERN_MODULE = 0x532 - AUDIT_LANDLOCK_ACCESS = 0x58f - AUDIT_LANDLOCK_DOMAIN = 0x590 AUDIT_LAST_FEATURE = 0x1 AUDIT_LAST_KERN_ANOM_MSG = 0x707 AUDIT_LAST_USER_MSG = 0x4af @@ -494,7 +492,6 @@ const ( BPF_F_BEFORE = 0x8 BPF_F_ID = 0x20 BPF_F_NETFILTER_IP_DEFRAG = 0x1 - BPF_F_PREORDER = 0x40 BPF_F_QUERY_EFFECTIVE = 0x1 BPF_F_REDIRECT_FLAGS = 0x19 BPF_F_REPLACE = 0x4 @@ -531,7 +528,6 @@ const ( BPF_LDX = 0x1 BPF_LEN = 0x80 BPF_LL_OFF = -0x200000 - BPF_LOAD_ACQ = 0x100 BPF_LSH = 0x60 BPF_MAJOR_VERSION = 0x1 BPF_MAXINSNS = 0x1000 @@ -559,7 +555,6 @@ const ( BPF_RET = 0x6 BPF_RSH = 0x70 BPF_ST = 0x2 - BPF_STORE_REL = 0x110 BPF_STX = 0x3 BPF_SUB = 0x10 BPF_TAG_SIZE = 0x8 @@ -849,9 +844,9 @@ const ( DM_UUID_FLAG = 0x4000 DM_UUID_LEN = 0x81 DM_VERSION = 0xc138fd00 - DM_VERSION_EXTRA = "-ioctl (2025-04-28)" + DM_VERSION_EXTRA = "-ioctl (2025-01-17)" DM_VERSION_MAJOR = 0x4 - DM_VERSION_MINOR = 0x32 + DM_VERSION_MINOR = 0x31 DM_VERSION_PATCHLEVEL = 0x0 DT_BLK = 0x6 DT_CHR = 0x2 @@ -942,6 +937,9 @@ const ( EPOLL_CTL_MOD = 0x3 EPOLL_IOC_TYPE = 0x8a EROFS_SUPER_MAGIC_V1 = 0xe0f5e1e2 + ESP_V4_FLOW = 0xa + ESP_V6_FLOW = 0xc + ETHER_FLOW = 0x12 ETHTOOL_BUSINFO_LEN = 0x20 ETHTOOL_EROMVERS_LEN = 0x20 ETHTOOL_FAMILY_NAME = "ethtool" @@ -1215,7 +1213,6 @@ const ( FAN_EVENT_INFO_TYPE_DFID_NAME = 0x2 FAN_EVENT_INFO_TYPE_ERROR = 0x5 FAN_EVENT_INFO_TYPE_FID = 0x1 - FAN_EVENT_INFO_TYPE_MNT = 0x7 FAN_EVENT_INFO_TYPE_NEW_DFID_NAME = 0xc FAN_EVENT_INFO_TYPE_OLD_DFID_NAME = 0xa FAN_EVENT_INFO_TYPE_PIDFD = 0x4 @@ -1234,12 +1231,9 @@ const ( FAN_MARK_IGNORED_SURV_MODIFY = 0x40 FAN_MARK_IGNORE_SURV = 0x440 FAN_MARK_INODE = 0x0 - FAN_MARK_MNTNS = 0x110 FAN_MARK_MOUNT = 0x10 FAN_MARK_ONLYDIR = 0x8 FAN_MARK_REMOVE = 0x2 - FAN_MNT_ATTACH = 0x1000000 - FAN_MNT_DETACH = 0x2000000 FAN_MODIFY = 0x2 FAN_MOVE = 0xc0 FAN_MOVED_FROM = 0x40 @@ -1261,7 +1255,6 @@ const ( FAN_REPORT_DIR_FID = 0x400 FAN_REPORT_FD_ERROR = 0x2000 FAN_REPORT_FID = 0x200 - FAN_REPORT_MNT = 0x4000 FAN_REPORT_NAME = 0x800 FAN_REPORT_PIDFD = 0x80 FAN_REPORT_TARGET_FID = 0x1000 @@ -1281,7 +1274,6 @@ const ( FIB_RULE_PERMANENT = 0x1 FIB_RULE_UNRESOLVED = 0x4 FIDEDUPERANGE = 0xc0189436 - FSCRYPT_ADD_KEY_FLAG_HW_WRAPPED = 0x1 FSCRYPT_KEY_DESCRIPTOR_SIZE = 0x8 FSCRYPT_KEY_DESC_PREFIX = "fscrypt:" FSCRYPT_KEY_DESC_PREFIX_SIZE = 0x8 @@ -1590,6 +1582,7 @@ const ( IPV6_DONTFRAG = 0x3e IPV6_DROP_MEMBERSHIP = 0x15 IPV6_DSTOPTS = 0x3b + IPV6_FLOW = 0x11 IPV6_FREEBIND = 0x4e IPV6_HDRINCL = 0x24 IPV6_HOPLIMIT = 0x34 @@ -1640,6 +1633,7 @@ const ( IPV6_TRANSPARENT = 0x4b IPV6_UNICAST_HOPS = 0x10 IPV6_UNICAST_IF = 0x4c + IPV6_USER_FLOW = 0xe IPV6_V6ONLY = 0x1a IPV6_VERSION = 0x60 IPV6_VERSION_MASK = 0xf0 @@ -1701,6 +1695,7 @@ const ( IP_TTL = 0x2 IP_UNBLOCK_SOURCE = 0x25 IP_UNICAST_IF = 0x32 + IP_USER_FLOW = 0xd IP_XFRM_POLICY = 0x11 ISOFS_SUPER_MAGIC = 0x9660 ISTRIP = 0x20 @@ -1822,11 +1817,7 @@ const ( LANDLOCK_ACCESS_FS_WRITE_FILE = 0x2 LANDLOCK_ACCESS_NET_BIND_TCP = 0x1 LANDLOCK_ACCESS_NET_CONNECT_TCP = 0x2 - LANDLOCK_CREATE_RULESET_ERRATA = 0x2 LANDLOCK_CREATE_RULESET_VERSION = 0x1 - LANDLOCK_RESTRICT_SELF_LOG_NEW_EXEC_ON = 0x2 - LANDLOCK_RESTRICT_SELF_LOG_SAME_EXEC_OFF = 0x1 - LANDLOCK_RESTRICT_SELF_LOG_SUBDOMAINS_OFF = 0x4 LANDLOCK_SCOPE_ABSTRACT_UNIX_SOCKET = 0x1 LANDLOCK_SCOPE_SIGNAL = 0x2 
LINUX_REBOOT_CMD_CAD_OFF = 0x0 @@ -2502,10 +2493,6 @@ const ( PR_FP_EXC_UND = 0x40000 PR_FP_MODE_FR = 0x1 PR_FP_MODE_FRE = 0x2 - PR_FUTEX_HASH = 0x4e - PR_FUTEX_HASH_GET_IMMUTABLE = 0x3 - PR_FUTEX_HASH_GET_SLOTS = 0x2 - PR_FUTEX_HASH_SET_SLOTS = 0x1 PR_GET_AUXV = 0x41555856 PR_GET_CHILD_SUBREAPER = 0x25 PR_GET_DUMPABLE = 0x3 @@ -2665,10 +2652,6 @@ const ( PR_TAGGED_ADDR_ENABLE = 0x1 PR_TASK_PERF_EVENTS_DISABLE = 0x1f PR_TASK_PERF_EVENTS_ENABLE = 0x20 - PR_TIMER_CREATE_RESTORE_IDS = 0x4d - PR_TIMER_CREATE_RESTORE_IDS_GET = 0x2 - PR_TIMER_CREATE_RESTORE_IDS_OFF = 0x0 - PR_TIMER_CREATE_RESTORE_IDS_ON = 0x1 PR_TIMING_STATISTICAL = 0x0 PR_TIMING_TIMESTAMP = 0x1 PR_TSC_ENABLE = 0x1 @@ -2749,7 +2732,6 @@ const ( PTRACE_SETREGSET = 0x4205 PTRACE_SETSIGINFO = 0x4203 PTRACE_SETSIGMASK = 0x420b - PTRACE_SET_SYSCALL_INFO = 0x4212 PTRACE_SET_SYSCALL_USER_DISPATCH_CONFIG = 0x4210 PTRACE_SINGLESTEP = 0x9 PTRACE_SYSCALL = 0x18 @@ -3000,7 +2982,6 @@ const ( RTPROT_NTK = 0xf RTPROT_OPENR = 0x63 RTPROT_OSPF = 0xbc - RTPROT_OVN = 0x54 RTPROT_RA = 0x9 RTPROT_REDIRECT = 0x1 RTPROT_RIP = 0xbd @@ -3355,7 +3336,7 @@ const ( TASKSTATS_GENL_NAME = "TASKSTATS" TASKSTATS_GENL_VERSION = 0x1 TASKSTATS_TYPE_MAX = 0x6 - TASKSTATS_VERSION = 0x10 + TASKSTATS_VERSION = 0xf TCIFLUSH = 0x0 TCIOFF = 0x2 TCIOFLUSH = 0x2 @@ -3425,6 +3406,8 @@ const ( TCP_TX_DELAY = 0x25 TCP_ULP = 0x1f TCP_USER_TIMEOUT = 0x12 + TCP_V4_FLOW = 0x1 + TCP_V6_FLOW = 0x5 TCP_WINDOW_CLAMP = 0xa TCP_ZEROCOPY_RECEIVE = 0x23 TFD_TIMER_ABSTIME = 0x1 @@ -3547,6 +3530,8 @@ const ( UDP_NO_CHECK6_RX = 0x66 UDP_NO_CHECK6_TX = 0x65 UDP_SEGMENT = 0x67 + UDP_V4_FLOW = 0x2 + UDP_V6_FLOW = 0x6 UMOUNT_NOFOLLOW = 0x8 USBDEVICE_SUPER_MAGIC = 0x9fa2 UTIME_NOW = 0x3fffffff @@ -3589,7 +3574,7 @@ const ( WDIOS_TEMPPANIC = 0x4 WDIOS_UNKNOWN = -0x1 WEXITED = 0x4 - WGALLOWEDIP_A_MAX = 0x4 + WGALLOWEDIP_A_MAX = 0x3 WGDEVICE_A_MAX = 0x8 WGPEER_A_MAX = 0xa WG_CMD_MAX = 0x1 @@ -3703,7 +3688,6 @@ const ( XDP_SHARED_UMEM = 0x1 XDP_STATISTICS = 0x7 XDP_TXMD_FLAGS_CHECKSUM = 0x2 - XDP_TXMD_FLAGS_LAUNCH_TIME = 0x4 XDP_TXMD_FLAGS_TIMESTAMP = 0x1 XDP_TX_METADATA = 0x2 XDP_TX_RING = 0x3 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index 1c37f9fb..a8c421e2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -361,7 +360,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index 6f54d34a..9a88d181 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -362,7 +361,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index 783ec5c1..7cb6a867 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ 
b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -367,7 +366,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index ca83d3ba..d0ecd2c5 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -360,7 +359,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 607e611c..7a2940ae 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -354,7 +353,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index b9cb5bd3..d14ca8f2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -360,7 +359,6 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index 65b078a6..2da1bac1 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -360,7 +359,6 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index 5298a303..28727514 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -360,7 +359,6 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index 7bc557c8..7f287b54 100644 --- 
a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -360,7 +359,6 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index 152399bb..7e5f9e6a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -68,7 +68,6 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -415,7 +414,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index 1a1ce240..37c87952 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -419,7 +418,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index 4231a1fb..52201336 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -68,7 +68,6 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -419,7 +418,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index 21c0e952..4bfe2b5b 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -351,7 +350,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index f00d1cd7..e3cffb86 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -68,7 +68,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -423,7 +422,6 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c - SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go 
b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index bc8d539e..c219c8db 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -71,7 +71,6 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 - DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -462,7 +461,6 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x2 SO_PASSPIDFD = 0x55 - SO_PASSRIGHTS = 0x5c SO_PASSSEC = 0x1f SO_PEEK_OFF = 0x26 SO_PEERCRED = 0x40 diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go index aca56ee4..c79aaff3 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go @@ -462,5 +462,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go index 2ea1ef58..5eb45069 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go @@ -385,5 +385,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go index d22c8af3..05e50297 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go @@ -426,5 +426,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go index 5ee264ae..38c53ec5 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go @@ -329,5 +329,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go index f9f03ebf..31d2e71a 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go @@ -325,5 +325,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go index 87c2118e..f4184a33 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go @@ -446,5 +446,4 @@ const ( SYS_GETXATTRAT = 4464 SYS_LISTXATTRAT = 4465 SYS_REMOVEXATTRAT = 4466 - SYS_OPEN_TREE_ATTR = 4467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go index 391ad102..05b99622 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go @@ -376,5 +376,4 @@ const ( SYS_GETXATTRAT = 5464 SYS_LISTXATTRAT = 5465 SYS_REMOVEXATTRAT = 5466 - SYS_OPEN_TREE_ATTR = 5467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go index 56561577..43a256e9 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go +++ 
b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go @@ -376,5 +376,4 @@ const ( SYS_GETXATTRAT = 5464 SYS_LISTXATTRAT = 5465 SYS_REMOVEXATTRAT = 5466 - SYS_OPEN_TREE_ATTR = 5467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go index 0482b52e..eea5ddfc 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go @@ -446,5 +446,4 @@ const ( SYS_GETXATTRAT = 4464 SYS_LISTXATTRAT = 4465 SYS_REMOVEXATTRAT = 4466 - SYS_OPEN_TREE_ATTR = 4467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go index 71806f08..0d777bfb 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go @@ -453,5 +453,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go index e35a7105..b4463650 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go @@ -425,5 +425,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go index 2aea4767..0c7d21c1 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go @@ -425,5 +425,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go index 6c9bb4e5..84053916 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go @@ -330,5 +330,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go index 680bc991..fcf1b790 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go @@ -391,5 +391,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go index 620f2710..52d15b5f 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go @@ -404,5 +404,4 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 - SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index cd236443..8bcac283 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -115,9 +115,7 @@ type Statx_t struct { Atomic_write_unit_max uint32 Atomic_write_segments_max uint32 Dio_read_offset_align uint32 - Atomic_write_unit_max_opt uint32 - _ [1]uint32 - _ [8]uint64 + _ [9]uint64 } type Fsid struct { @@ -201,8 +199,7 @@ type FscryptAddKeyArg struct { Key_spec FscryptKeySpecifier Raw_size uint32 Key_id uint32 - Flags uint32 - _ [7]uint32 
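The `Statx_t` hunk above folds the freshly named `Atomic_write_unit_max_opt` field back into the anonymous reserved tail (`_ [9]uint64`). The kernel pads `struct statx` to a fixed size precisely so new fields can be carved out of that reserve without changing the syscall ABI, which means the Go mirror must be the same size before and after a change like this. A minimal sanity check; the 256-byte figure is taken from the kernel uapi headers and should be treated as an assumption here:

```go
package main

import (
	"fmt"
	"unsafe"

	"golang.org/x/sys/unix"
)

func main() {
	// struct statx reserves trailing space; naming a field inside the
	// reserve (or reverting that) must never change the total size.
	const want = 256 // sizeof(struct statx) per the kernel uapi headers
	if got := unsafe.Sizeof(unix.Statx_t{}); got != want {
		panic(fmt.Sprintf("Statx_t is %d bytes, want %d", got, want))
	}
	fmt.Println("Statx_t layout OK")
}
```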
+ _ [8]uint32 } type FscryptRemoveKeyArg struct { @@ -2320,11 +2317,6 @@ const ( NFT_CT_AVGPKT = 0x10 NFT_CT_ZONE = 0x11 NFT_CT_EVENTMASK = 0x12 - NFT_CT_SRC_IP = 0x13 - NFT_CT_DST_IP = 0x14 - NFT_CT_SRC_IP6 = 0x15 - NFT_CT_DST_IP6 = 0x16 - NFT_CT_ID = 0x17 NFTA_CT_UNSPEC = 0x0 NFTA_CT_DREG = 0x1 NFTA_CT_KEY = 0x2 @@ -2605,8 +2597,8 @@ const ( SOF_TIMESTAMPING_BIND_PHC = 0x8000 SOF_TIMESTAMPING_OPT_ID_TCP = 0x10000 - SOF_TIMESTAMPING_LAST = 0x40000 - SOF_TIMESTAMPING_MASK = 0x7ffff + SOF_TIMESTAMPING_LAST = 0x20000 + SOF_TIMESTAMPING_MASK = 0x3ffff SCM_TSTAMP_SND = 0x0 SCM_TSTAMP_SCHED = 0x1 @@ -4052,7 +4044,7 @@ const ( ETHTOOL_A_TSINFO_PHC_INDEX = 0x5 ETHTOOL_A_TSINFO_STATS = 0x6 ETHTOOL_A_TSINFO_HWTSTAMP_PROVIDER = 0x7 - ETHTOOL_A_TSINFO_MAX = 0x9 + ETHTOOL_A_TSINFO_MAX = 0x7 ETHTOOL_A_CABLE_TEST_UNSPEC = 0x0 ETHTOOL_A_CABLE_TEST_HEADER = 0x1 ETHTOOL_A_CABLE_TEST_MAX = 0x1 @@ -4138,19 +4130,6 @@ const ( ETHTOOL_A_TUNNEL_INFO_MAX = 0x2 ) -const ( - TCP_V4_FLOW = 0x1 - UDP_V4_FLOW = 0x2 - TCP_V6_FLOW = 0x5 - UDP_V6_FLOW = 0x6 - ESP_V4_FLOW = 0xa - ESP_V6_FLOW = 0xc - IP_USER_FLOW = 0xd - IPV6_USER_FLOW = 0xe - IPV6_FLOW = 0x11 - ETHER_FLOW = 0x12 -) - const SPEED_UNKNOWN = -0x1 type EthtoolDrvinfo struct { @@ -4801,7 +4780,7 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x151 + NL80211_ATTR_MAX = 0x150 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce NL80211_ATTR_MAX_HW_TIMESTAMP_PEERS = 0x143 @@ -5435,7 +5414,7 @@ const ( NL80211_FREQUENCY_ATTR_GO_CONCURRENT = 0xf NL80211_FREQUENCY_ATTR_INDOOR_ONLY = 0xe NL80211_FREQUENCY_ATTR_IR_CONCURRENT = 0xf - NL80211_FREQUENCY_ATTR_MAX = 0x22 + NL80211_FREQUENCY_ATTR_MAX = 0x21 NL80211_FREQUENCY_ATTR_MAX_TX_POWER = 0x6 NL80211_FREQUENCY_ATTR_NO_10MHZ = 0x11 NL80211_FREQUENCY_ATTR_NO_160MHZ = 0xc @@ -5551,7 +5530,7 @@ const ( NL80211_MAX_SUPP_SELECTORS = 0x80 NL80211_MBSSID_CONFIG_ATTR_EMA = 0x5 NL80211_MBSSID_CONFIG_ATTR_INDEX = 0x3 - NL80211_MBSSID_CONFIG_ATTR_MAX = 0x6 + NL80211_MBSSID_CONFIG_ATTR_MAX = 0x5 NL80211_MBSSID_CONFIG_ATTR_MAX_EMA_PROFILE_PERIODICITY = 0x2 NL80211_MBSSID_CONFIG_ATTR_MAX_INTERFACES = 0x1 NL80211_MBSSID_CONFIG_ATTR_TX_IFINDEX = 0x4 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index 485f2d3a..62db85f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -282,13 +282,19 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [6]byte + _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -324,11 +330,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -336,22 +348,10 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - 
Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index ecbd1ad8..7d89d648 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -300,10 +300,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -338,33 +344,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index 02f0463a..9c0b39ee 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -91,7 +91,7 @@ type Stat_t struct { Gid uint32 Rdev uint64 _ uint16 - _ [6]byte + _ [4]byte Size int64 Blksize int32 _ [4]byte @@ -273,13 +273,19 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [6]byte + _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -315,11 +321,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -327,22 +339,10 @@ type Taskstats 
struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index 6f4d400d..de9c7ff3 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -279,10 +279,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -317,33 +323,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index cd532cfa..2336bd2b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -280,10 +280,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -318,33 +324,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - 
Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 41336208..4711f0be 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -278,13 +278,19 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [6]byte + _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,22 +344,10 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index eaa37eb7..ab99a34b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,33 +326,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - 
Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index 98ae6a1e..04c9866e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,33 +326,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index cae19615..60aa69f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -278,13 +278,19 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [6]byte + _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,22 +344,10 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - 
Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index 6ce3b4e0..cb4fad78 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -90,7 +90,7 @@ type Stat_t struct { Gid uint32 Rdev uint64 _ uint16 - _ [6]byte + _ [4]byte Size int64 Blksize int32 _ [4]byte @@ -285,13 +285,19 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [6]byte + _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,11 +333,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -339,22 +351,10 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index c7429c6a..60272cfc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,33 +333,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total 
uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index 4bf4baf4..3f5b91bc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,33 +333,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index e9709d70..51550f15 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -307,10 +307,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -345,33 +351,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max 
uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index fb44268c..3239e50e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -302,10 +302,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -340,33 +346,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index 9c38265c..faf20027 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -284,10 +284,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -322,33 +328,27 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Irq_count uint64 - Irq_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 - Thrashing_delay_max uint64 - 
Thrashing_delay_min uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Wpcopy_delay_max uint64 Wpcopy_delay_min uint64 + Irq_count uint64 + Irq_delay_total uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/gorm.io/gorm/callbacks/create.go b/vendor/gorm.io/gorm/callbacks/create.go index cb8429b3..d8701f51 100644 --- a/vendor/gorm.io/gorm/callbacks/create.go +++ b/vendor/gorm.io/gorm/callbacks/create.go @@ -53,13 +53,9 @@ func Create(config *Config) func(db *gorm.DB) { if _, ok := db.Statement.Clauses["RETURNING"]; !ok { fromColumns := make([]clause.Column, 0, len(db.Statement.Schema.FieldsWithDefaultDBValue)) for _, field := range db.Statement.Schema.FieldsWithDefaultDBValue { - if field.Readable { - fromColumns = append(fromColumns, clause.Column{Name: field.DBName}) - } - } - if len(fromColumns) > 0 { - db.Statement.AddClause(clause.Returning{Columns: fromColumns}) + fromColumns = append(fromColumns, clause.Column{Name: field.DBName}) } + db.Statement.AddClause(clause.Returning{Columns: fromColumns}) } } } @@ -126,16 +122,6 @@ func Create(config *Config) func(db *gorm.DB) { pkFieldName = "@id" ) - if db.Statement.Schema != nil { - if db.Statement.Schema.PrioritizedPrimaryField == nil || - !db.Statement.Schema.PrioritizedPrimaryField.HasDefaultValue || - !db.Statement.Schema.PrioritizedPrimaryField.Readable { - return - } - pkField = db.Statement.Schema.PrioritizedPrimaryField - pkFieldName = db.Statement.Schema.PrioritizedPrimaryField.DBName - } - insertID, err := result.LastInsertId() insertOk := err == nil && insertID > 0 @@ -146,6 +132,14 @@ func Create(config *Config) func(db *gorm.DB) { return } + if db.Statement.Schema != nil { + if db.Statement.Schema.PrioritizedPrimaryField == nil || !db.Statement.Schema.PrioritizedPrimaryField.HasDefaultValue { + return + } + pkField = db.Statement.Schema.PrioritizedPrimaryField + pkFieldName = db.Statement.Schema.PrioritizedPrimaryField.DBName + } + // append @id column with value for auto-increment primary key // the @id value is correct, when: 1. without setting auto-increment primary key, 2. database AutoIncrementIncrement = 1 switch values := db.Statement.Dest.(type) { diff --git a/vendor/gorm.io/gorm/generics.go b/vendor/gorm.io/gorm/generics.go index f3c3e553..ad2d063f 100644 --- a/vendor/gorm.io/gorm/generics.go +++ b/vendor/gorm.io/gorm/generics.go @@ -567,7 +567,7 @@ func (g execG[T]) First(ctx context.Context) (T, error) { func (g execG[T]) Scan(ctx context.Context, result interface{}) error { var r T - err := g.g.apply(ctx).Model(r).Find(result).Error + err := g.g.apply(ctx).Model(r).Find(&result).Error return err } diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 6619f071..67889262 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -137,14 +137,6 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { return isConfig && !isConfig2 }) - if len(opts) > 0 { - if c, ok := opts[0].(*Config); ok { - config = c - } else { - opts = append([]Option{config}, opts...) 
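The long run of `Taskstats` hunks above (one per architecture) moves the per-resource `*_delay_max`/`*_delay_min` fields out of a trailing block and back next to their `*_delay_total` counterparts, matching the reverted `TASKSTATS_VERSION = 0xf` layout. Field order is the wire format here: a taskstats reply is decoded by laying the struct over the netlink payload, so a layout that disagrees with the running kernel silently scrambles every counter after the first mismatched field. A minimal decode sketch in native byte order; the payload is assumed to be a raw `TASKSTATS_TYPE_STATS` attribute, and fetching it over generic netlink is out of scope:

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"

	"golang.org/x/sys/unix"
)

// decodeTaskstats overlays the vendored struct layout on a raw payload.
// encoding/binary skips blank (_) padding fields automatically.
func decodeTaskstats(payload []byte) (*unix.Taskstats, error) {
	var ts unix.Taskstats
	if err := binary.Read(bytes.NewReader(payload), binary.NativeEndian, &ts); err != nil {
		return nil, err
	}
	// Kernels append fields over time; only the common prefix is
	// trustworthy when the reported version differs from the vendored one.
	if ts.Version != unix.TASKSTATS_VERSION {
		fmt.Printf("kernel taskstats v%d, vendored layout v%#x\n",
			ts.Version, unix.TASKSTATS_VERSION)
	}
	return &ts, nil
}
```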
- } - } - var skipAfterInitialize bool for _, opt := range opts { if opt != nil { diff --git a/vendor/gorm.io/gorm/migrator/migrator.go b/vendor/gorm.io/gorm/migrator/migrator.go index 50a36d10..cec4e30f 100644 --- a/vendor/gorm.io/gorm/migrator/migrator.go +++ b/vendor/gorm.io/gorm/migrator/migrator.go @@ -474,6 +474,7 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy // found, smart migrate fullDataType := strings.TrimSpace(strings.ToLower(m.DB.Migrator().FullDataTypeOf(field).SQL)) realDataType := strings.ToLower(columnType.DatabaseTypeName()) + var ( alterColumn bool isSameType = fullDataType == realDataType @@ -512,19 +513,8 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy } } } - } - // check precision - if realDataType == "decimal" || realDataType == "numeric" && - regexp.MustCompile(realDataType+`\(.*\)`).FindString(fullDataType) != "" { // if realDataType has no precision,ignore - precision, scale, ok := columnType.DecimalSize() - if ok { - if !strings.HasPrefix(fullDataType, fmt.Sprintf("%s(%d,%d)", realDataType, precision, scale)) && - !strings.HasPrefix(fullDataType, fmt.Sprintf("%s(%d)", realDataType, precision)) { - alterColumn = true - } - } - } else { + // check precision if precision, _, ok := columnType.DecimalSize(); ok && int64(field.Precision) != precision { if regexp.MustCompile(fmt.Sprintf("[^0-9]%d[^0-9]", field.Precision)).MatchString(m.DataTypeOf(field)) { alterColumn = true diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index 67e60f70..a6ff1a72 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -448,30 +448,21 @@ func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field { } // create valuer, setter when parse struct -func (field *Field) setupValuerAndSetter(modelType reflect.Type) { +func (field *Field) setupValuerAndSetter() { // Setup NewValuePool field.setupNewValuePool() // ValueOf returns field's value and if it is zero fieldIndex := field.StructField.Index[0] switch { - case len(field.StructField.Index) == 1 && fieldIndex >= 0: - field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { - v = reflect.Indirect(v) - if v.Type() != modelType { - fieldValue := v.FieldByName(field.Name) - return fieldValue.Interface(), fieldValue.IsZero() - } - fieldValue := v.Field(fieldIndex) + case len(field.StructField.Index) == 1 && fieldIndex > 0: + field.ValueOf = func(ctx context.Context, value reflect.Value) (interface{}, bool) { + fieldValue := reflect.Indirect(value).Field(fieldIndex) return fieldValue.Interface(), fieldValue.IsZero() } default: field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { v = reflect.Indirect(v) - if v.Type() != modelType { - fieldValue := v.FieldByName(field.Name) - return fieldValue.Interface(), fieldValue.IsZero() - } for _, fieldIdx := range field.StructField.Index { if fieldIdx >= 0 { v = v.Field(fieldIdx) @@ -513,20 +504,13 @@ func (field *Field) setupValuerAndSetter(modelType reflect.Type) { // ReflectValueOf returns field's reflect value switch { - case len(field.StructField.Index) == 1 && fieldIndex >= 0: - field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value { - v = reflect.Indirect(v) - if v.Type() != modelType { - return v.FieldByName(field.Name) - } - return v.Field(fieldIndex) + case len(field.StructField.Index) == 1 && fieldIndex > 0: + field.ReflectValueOf = func(ctx context.Context, 
value reflect.Value) reflect.Value { + return reflect.Indirect(value).Field(fieldIndex) } default: field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value { v = reflect.Indirect(v) - if v.Type() != modelType { - return v.FieldByName(field.Name) - } for idx, fieldIdx := range field.StructField.Index { if fieldIdx >= 0 { v = v.Field(fieldIdx) diff --git a/vendor/gorm.io/gorm/schema/schema.go b/vendor/gorm.io/gorm/schema/schema.go index 2a5c28e2..db236797 100644 --- a/vendor/gorm.io/gorm/schema/schema.go +++ b/vendor/gorm.io/gorm/schema/schema.go @@ -5,7 +5,6 @@ import ( "errors" "fmt" "go/ast" - "path" "reflect" "strings" "sync" @@ -248,7 +247,7 @@ func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Nam schema.FieldsByBindName[bindName] = field } - field.setupValuerAndSetter(modelType) + field.setupValuerAndSetter() } prioritizedPrimaryField := schema.LookUpField("id") @@ -314,14 +313,8 @@ func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Nam for _, cbName := range callbackTypes { if methodValue := callBackToMethodValue(modelValue, cbName); methodValue.IsValid() { switch methodValue.Type().String() { - case "func(*gorm.DB) error": - expectedPkgPath := path.Dir(reflect.TypeOf(schema).Elem().PkgPath()) - if inVarPkg := methodValue.Type().In(0).Elem().PkgPath(); inVarPkg == expectedPkgPath { - reflect.Indirect(reflect.ValueOf(schema)).FieldByName(string(cbName)).SetBool(true) - } else { - logger.Default.Warn(context.Background(), "In model %v, the hook function `%v(*gorm.DB) error` has an incorrect parameter type. The expected parameter type is `%v`, but the provided type is `%v`.", schema, cbName, expectedPkgPath, inVarPkg) - // PASS - } + case "func(*gorm.DB) error": // TODO hack + reflect.Indirect(reflect.ValueOf(schema)).FieldByName(string(cbName)).SetBool(true) default: logger.Default.Warn(context.Background(), "Model %v don't match %vInterface, should be `%v(*gorm.DB) error`. 
Please see https://gorm.io/docs/hooks.html", schema, cbName, cbName) } diff --git a/vendor/gorm.io/gorm/statement.go b/vendor/gorm.io/gorm/statement.go index ba5d3f18..c6183724 100644 --- a/vendor/gorm.io/gorm/statement.go +++ b/vendor/gorm.io/gorm/statement.go @@ -341,9 +341,7 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if where, ok := cs.Expression.(clause.Where); ok { if len(where.Exprs) == 1 { if orConds, ok := where.Exprs[0].(clause.OrConditions); ok { - if len(orConds.Exprs) == 1 { - where.Exprs[0] = clause.AndConditions(orConds) - } + where.Exprs[0] = clause.AndConditions(orConds) } } conds = append(conds, clause.And(where.Exprs...)) @@ -364,9 +362,6 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { column := clause.Column{Name: key, Table: curTable} - if strings.Contains(key, ".") { - column = clause.Column{Name: key} - } conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } case map[string]interface{}: @@ -379,9 +374,6 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { reflectValue := reflect.Indirect(reflect.ValueOf(v[key])) column := clause.Column{Name: key, Table: curTable} - if strings.Contains(key, ".") { - column = clause.Column{Name: key} - } switch reflectValue.Kind() { case reflect.Slice, reflect.Array: if _, ok := v[key].(driver.Valuer); ok { diff --git a/vendor/modules.txt b/vendor/modules.txt index 5dd8f751..980c48a0 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,7 +21,7 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.7 +# github.com/cloudbase/garm-provider-common v0.1.6 ## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors @@ -52,10 +52,10 @@ github.com/go-openapi/analysis/internal/flatten/operations github.com/go-openapi/analysis/internal/flatten/replace github.com/go-openapi/analysis/internal/flatten/schutils github.com/go-openapi/analysis/internal/flatten/sortref -# github.com/go-openapi/errors v0.22.2 +# github.com/go-openapi/errors v0.22.1 ## explicit; go 1.20 github.com/go-openapi/errors -# github.com/go-openapi/jsonpointer v0.21.2 +# github.com/go-openapi/jsonpointer v0.21.1 ## explicit; go 1.20 github.com/go-openapi/jsonpointer # github.com/go-openapi/jsonreference v0.21.0 @@ -94,8 +94,8 @@ github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 ## explicit; go 1.16 github.com/golang-jwt/jwt/v4 -# github.com/golang-jwt/jwt/v5 v5.3.0 -## explicit; go 1.21 +# github.com/golang-jwt/jwt/v5 v5.2.3 +## explicit; go 1.18 github.com/golang-jwt/jwt/v5 # github.com/google/go-github/v72 v72.0.0 ## explicit; go 1.23.0 @@ -118,7 +118,7 @@ github.com/gorilla/websocket # github.com/inconshreveable/mousetrap v1.1.0 ## explicit; go 1.18 github.com/inconshreveable/mousetrap -# github.com/jedib0t/go-pretty/v6 v6.6.8 +# github.com/jedib0t/go-pretty/v6 v6.6.7 ## explicit; go 1.18 github.com/jedib0t/go-pretty/v6/table github.com/jedib0t/go-pretty/v6/text @@ -131,6 +131,19 @@ github.com/jinzhu/now # github.com/josharian/intern v1.0.0 ## explicit; go 1.5 github.com/josharian/intern +# github.com/juju/clock v1.1.1 +## explicit; go 1.18 +github.com/juju/clock +# github.com/juju/errors v1.0.0 +## explicit; go 1.18 +github.com/juju/errors +# github.com/juju/loggo v1.0.0 +## explicit; go 1.14 +# 
github.com/juju/retry v1.0.1 +## explicit; go 1.17 +github.com/juju/retry +# github.com/juju/testing v1.0.2 +## explicit; go 1.17 # github.com/mailru/easyjson v0.9.0 ## explicit; go 1.20 github.com/mailru/easyjson/buffer @@ -147,7 +160,7 @@ github.com/mattn/go-isatty # github.com/mattn/go-runewidth v0.0.16 ## explicit; go 1.9 github.com/mattn/go-runewidth -# github.com/mattn/go-sqlite3 v1.14.31 +# github.com/mattn/go-sqlite3 v1.14.28 ## explicit; go 1.19 github.com/mattn/go-sqlite3 # github.com/minio/sio v0.4.1 @@ -184,8 +197,8 @@ github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/prometheus/client_golang v1.23.0 -## explicit; go 1.23.0 +# github.com/prometheus/client_golang v1.22.0 +## explicit; go 1.22 github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil/header github.com/prometheus/client_golang/prometheus @@ -199,7 +212,7 @@ github.com/prometheus/client_model/go ## explicit; go 1.23.0 github.com/prometheus/common/expfmt github.com/prometheus/common/model -# github.com/prometheus/procfs v0.16.1 +# github.com/prometheus/procfs v0.17.0 ## explicit; go 1.23.0 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs @@ -216,7 +229,7 @@ github.com/spf13/pflag # github.com/stretchr/objx v0.5.2 ## explicit; go 1.20 github.com/stretchr/objx -# github.com/stretchr/testify v1.11.0 +# github.com/stretchr/testify v1.10.0 ## explicit; go 1.17 github.com/stretchr/testify/assert github.com/stretchr/testify/assert/yaml @@ -239,7 +252,7 @@ go.mongodb.org/mongo-driver/x/bsonx/bsoncore ## explicit; go 1.22.0 go.opentelemetry.io/auto/sdk go.opentelemetry.io/auto/sdk/internal/telemetry -# go.opentelemetry.io/otel v1.36.0 +# go.opentelemetry.io/otel v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel go.opentelemetry.io/otel/attribute @@ -253,17 +266,18 @@ go.opentelemetry.io/otel/semconv/internal/v2 go.opentelemetry.io/otel/semconv/v1.17.0 go.opentelemetry.io/otel/semconv/v1.17.0/httpconv go.opentelemetry.io/otel/semconv/v1.26.0 -# go.opentelemetry.io/otel/metric v1.36.0 +go.opentelemetry.io/otel/semconv/v1.34.0 +# go.opentelemetry.io/otel/metric v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel/metric go.opentelemetry.io/otel/metric/embedded -# go.opentelemetry.io/otel/trace v1.36.0 +# go.opentelemetry.io/otel/trace v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.41.0 +# golang.org/x/crypto v0.40.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -272,9 +286,6 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/mod v0.27.0 -## explicit; go 1.23.0 -golang.org/x/mod/semver # golang.org/x/net v0.42.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks @@ -286,12 +297,12 @@ golang.org/x/oauth2/internal # golang.org/x/sync v0.16.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.35.0 +# golang.org/x/sys v0.34.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.28.0 +# golang.org/x/text v0.27.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -351,7 +362,7 @@ 
gorm.io/driver/mysql # gorm.io/driver/sqlite v1.6.0 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.30.1 +# gorm.io/gorm v1.30.0 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks diff --git a/webapp/.env.development b/webapp/.env.development deleted file mode 100644 index 17e74eaa..00000000 --- a/webapp/.env.development +++ /dev/null @@ -1,2 +0,0 @@ -VITE_GARM_API_URL=http://localhost:9997 -NODE_ENV=development diff --git a/webapp/.env.example b/webapp/.env.example deleted file mode 100644 index 2e2f8dbb..00000000 --- a/webapp/.env.example +++ /dev/null @@ -1,8 +0,0 @@ -# Development Environment Variables - -# GARM Backend API URL (for development only) -# When set, the frontend will connect to this URL instead of using proxy -# VITE_GARM_API_URL=http://localhost:9997 - -# Node Environment (automatically set by npm scripts) -# NODE_ENV=development \ No newline at end of file diff --git a/webapp/DEV_SETUP.md b/webapp/DEV_SETUP.md deleted file mode 100644 index 2eb778f3..00000000 --- a/webapp/DEV_SETUP.md +++ /dev/null @@ -1,79 +0,0 @@ -# Development Setup - -The web app can be started with the `npm run dev` command, which will start a development server with hot reloading. To properly work, there are a number of prerequisites you need to have and some GARM settings to tweak. - -## Prerequisites - -To have a full development setup, you will need the following prerequisites: - -- **Node.js 24+** and **npm** -- **Go 1.24+** (for building the GARM backend) -- **openapi-generator-cli** in your PATH (for API client generation) - -The `openapi-generator-cli` will also need java to be installed. If you're running on Ubuntu, running: - -```bash -sudo apt-get install default-jre -``` - -should be enough. Different distros should have an equivalent package available. - ->[!NOTE] ->If you don't need to change the web app, you don't need to rebuild it. There is already a pre-built version in the repo. - -## Necessary GARM settings - -GARM has strict origin checks for websockets and API calls. To allow your local development server to communicate with the GARM backend, you need to configure the following settings: - -```toml -[apiserver] -cors_origins = ["https://garm.example.com", "http://127.0.0.1:5173"] -``` - ->[!IMPORTANT] -> You must include the port. - ->[!IMPORTANT] -> Omitting the `cors_origins` option will automatically check same host origin. - -## Development Server - -Your GARM server can be started and hosted anywhere. As long as you set the proper `cors_origins` URLs, your web-ui development server can be separate from your GARM server. To point the web app to the GARM server, you will need to create an `.env.development` file in the `webapp/` directory: - -```bash -cd /home/ubuntu/garm/webapp -echo "VITE_GARM_API_URL=http://localhost:9997" > .env -echo "NODE_ENV=development" >> .env -npm run dev -``` - -## Asset Management - -During development: -- SVG icons are served from `static/assets/` -- Favicons are served from `static/` -- All static assets are copied from `assets/assets/` to `static/assets/` - -## Building for Production - -For production deployments, the web app is embedded into the GARM binary. You don't need to serve it separately. 
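The embedding is plain Go `embed` under the hood: the compiled SvelteKit output is baked into the binary and GARM serves it itself. A minimal, hypothetical sketch of that pattern (directory, variable name, and port are illustrative, not GARM's actual handler):

```go
package main

import (
	"embed"
	"io/fs"
	"log"
	"net/http"
)

// The build output is assumed to sit in an assets/ directory next to this
// file at compile time; GARM's real layout lives under webapp/assets/.
//
//go:embed all:assets
var embeddedSPA embed.FS

func main() {
	// Strip the top-level directory so index.html is served at /ui/.
	sub, err := fs.Sub(embeddedSPA, "assets")
	if err != nil {
		log.Fatal(err)
	}
	http.Handle("/ui/", http.StripPrefix("/ui/", http.FileServer(http.FS(sub))))
	log.Fatal(http.ListenAndServe(":9997", nil))
}
```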
-
-## Asset Management
-
-During development:
-- SVG icons are served from `static/assets/`
-- Favicons are served from `static/`
-- All static assets are copied from `assets/assets/` to `static/assets/`
-
-## Building for Production
-
-For production deployments, the web app is embedded into the GARM binary. You don't need to serve it separately. To build the web app and embed it into the binary, run the following two commands:
-
-```bash
-# Build the static webapp
-make build-webui
-# Build the garm binary with the webapp embedded
-make build
-```
-
-This creates the production build with:
-- Base path set to `/ui`
-- All assets embedded for Go to serve
-- Optimized bundles
-
->[!IMPORTANT]
->The web UI is an optional feature in GARM. For the `/ui` URL to be available, you will need to enable it in the garm config file under:
->```toml
->[apiserver.webui]
-> enable=true
->```
->See the sample config file in the `testdata/config.toml` file.
\ No newline at end of file
diff --git a/webapp/README.md b/webapp/README.md
deleted file mode 100644
index 4b63b2a8..00000000
--- a/webapp/README.md
+++ /dev/null
@@ -1,102 +0,0 @@
-# GARM SPA (SvelteKit)
-
-This is a Single Page Application (SPA) implementation of the GARM web interface using SvelteKit.
-
-## Features
-
-- **Lightweight**: Uses SvelteKit for a minimal bundle size and fast performance
-- **Modern**: TypeScript-first development with full type safety
-- **Responsive**: Mobile-first design using Tailwind CSS
-- **Real-time**: WebSocket integration for live updates
-- **API-driven**: Uses the existing GARM REST API endpoints
-
-### Quick Start
-
-1. **Clone the repository** (if not already done)
-
-```bash
-git clone https://github.com/cloudbase/garm.git
-cd garm
-```
-
-2. **Build and test GARM with the embedded webapp**
-
-```bash
-# You can skip this command if you made no changes to the webapp.
-make build-webui
-# builds the binary, with the web UI embedded.
-make build
-```
-
-Make sure you enable the webui in the config:
-
-```toml
-[apiserver.webui]
- enable=true
-```
-
-3. **Access the webapp**
-
-   Navigate to `http://localhost:9997/ui/` (or your configured FQDN and port)
-
-### Development Workflow
-
-See the [DEV_SETUP.md](DEV_SETUP.md) file.
-
-### Git Workflow
-
-**DO NOT commit** the following directories:
-- `webapp/node_modules/` - Dependencies (managed by package-lock.json)
-- `webapp/.svelte-kit/` - Build cache and generated files
-- `webapp/build/` - Production build output
-
-These are already included in `.gitignore`. Only commit source files in `webapp/src/` and configuration files.
-
-### API Client Generation
-
-The webapp uses TypeScript clients auto-generated from the GARM OpenAPI spec via `go generate`. To regenerate the clients, mocks, and everything else, run the following in the root folder of the project:
-
-```bash
-go generate ./...
-```
-
->[!NOTE]
-> See [DEV_SETUP.md](DEV_SETUP.md) for prerequisites before you try to generate the files.
-
-### Asset Serving
-
-The webapp is embedded using Go's `embed` package in `webapp/assets/assets.go`:
-
-```go
-//go:embed all:*
-var EmbeddedSPA embed.FS
-```
-
-This allows GARM to serve the entire webapp with zero external dependencies. The webapp assets are compiled into the Go binary at build time.
-
-## Running GARM behind a reverse proxy
-
-In production, GARM serves the web UI and assets from the files embedded in the binary. The web UI also relies on the [events](/doc/events.md) API for real-time updates.
-
-For a fully working experience, you will need to configure your reverse proxy to allow websocket upgrades. For an `nginx` example, see [the sample config in the testdata folder](/testdata/nginx-server.conf).
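-
-As a quick smoke test that upgrade requests survive the proxy, you can attempt a raw websocket handshake with curl. This is a sketch; the hostname and path are illustrative, so substitute your GARM URL and the events endpoint you use:
-
-```bash
-# Expect "HTTP/1.1 101 Switching Protocols" (or an auth error from GARM
-# itself) rather than the proxy stripping the upgrade headers.
-# Hostname and path below are illustrative.
-curl -s -i --max-time 5 \
-  -H "Connection: Upgrade" \
-  -H "Upgrade: websocket" \
-  -H "Sec-WebSocket-Version: 13" \
-  -H "Sec-WebSocket-Key: $(head -c 16 /dev/urandom | base64)" \
-  https://garm.example.com/api/v1/ws | head -n 1
-```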
-
-Additionally, in production you can override the default web UI that is embedded in GARM without updating the garm binary. To do that, build the webapp, place it in the document root of `nginx` and create a new `location /ui` config in nginx. Something like the following should work:
-
-```
-    # Place this before the proxy_pass location
-    location ~ ^/ui(/.*)?$ {
-        root /var/www/html/garm-webui/;
-    }
-
-    location / {
-        proxy_set_header X-Forwarded-For $remote_addr;
-        proxy_set_header X-Forwarded-Host $http_host;
-
-        proxy_pass http://garm_backend;
-        proxy_set_header Host $host;
-        proxy_redirect off;
-    }
-```
diff --git a/webapp/assets/_app/env.js b/webapp/assets/_app/env.js
deleted file mode 100644
index f5427da6..00000000
--- a/webapp/assets/_app/env.js
+++ /dev/null
@@ -1 +0,0 @@
-export const env={}
\ No newline at end of file
diff --git a/webapp/assets/_app/immutable/assets/0.srAxWR-A.css b/webapp/assets/_app/immutable/assets/0.srAxWR-A.css
deleted file mode 100644
index c83b2922..00000000
--- a/webapp/assets/_app/immutable/assets/0.srAxWR-A.css
+++ /dev/null
@@ -1 +0,0 @@
-/*! tailwindcss v4.1.11 | MIT License | https://tailwindcss.com */@layer properties{@supports ((-webkit-hyphens:none) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){*,:before,:after,::backdrop{--tw-translate-x:0;--tw-translate-y:0;--tw-translate-z:0;--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-space-y-reverse:0;--tw-space-x-reverse:0;--tw-divide-y-reverse:0;--tw-border-style:solid;--tw-gradient-position:initial;--tw-gradient-from:#0000;--tw-gradient-via:#0000;--tw-gradient-to:#0000;--tw-gradient-stops:initial;--tw-gradient-via-stops:initial;--tw-gradient-from-position:0%;--tw-gradient-via-position:50%;--tw-gradient-to-position:100%;--tw-leading:initial;--tw-font-weight:initial;--tw-tracking:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-duration:initial;--tw-ease:initial;--tw-scale-x:1;--tw-scale-y:1;--tw-scale-z:1}}}@layer theme{:root,:host{--font-sans:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--font-mono:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--color-red-50:oklch(97.1% .013 17.38);--color-red-100:oklch(93.6% .032 17.717);--color-red-200:oklch(88.5% .062 18.334);--color-red-300:oklch(80.8% .114 19.571);--color-red-400:oklch(70.4% .191 22.216);--color-red-500:oklch(63.7% .237 25.331);--color-red-600:oklch(57.7% .245 27.325);--color-red-700:oklch(50.5% .213 27.518);--color-red-800:oklch(44.4% .177 26.899);--color-red-900:oklch(39.6% .141 25.723);--color-orange-50:oklch(98% .016 73.684);--color-orange-400:oklch(75% .183 55.934);--color-orange-500:oklch(70.5% .213 47.604);--color-orange-600:oklch(64.6% .222 41.116);--color-orange-700:oklch(55.3% .195
38.402);--color-yellow-50:oklch(98.7% .026 102.212);--color-yellow-100:oklch(97.3% .071 103.193);--color-yellow-200:oklch(94.5% .129 101.54);--color-yellow-300:oklch(90.5% .182 98.111);--color-yellow-400:oklch(85.2% .199 91.936);--color-yellow-500:oklch(79.5% .184 86.047);--color-yellow-600:oklch(68.1% .162 75.834);--color-yellow-700:oklch(55.4% .135 66.442);--color-yellow-800:oklch(47.6% .114 61.907);--color-yellow-900:oklch(42.1% .095 57.708);--color-green-50:oklch(98.2% .018 155.826);--color-green-100:oklch(96.2% .044 156.743);--color-green-200:oklch(92.5% .084 155.995);--color-green-300:oklch(87.1% .15 154.449);--color-green-400:oklch(79.2% .209 151.711);--color-green-500:oklch(72.3% .219 149.579);--color-green-600:oklch(62.7% .194 149.214);--color-green-700:oklch(52.7% .154 150.069);--color-green-800:oklch(44.8% .119 151.328);--color-green-900:oklch(39.3% .095 152.535);--color-blue-50:oklch(97% .014 254.604);--color-blue-100:oklch(93.2% .032 255.585);--color-blue-200:oklch(88.2% .059 254.128);--color-blue-300:oklch(80.9% .105 251.813);--color-blue-400:oklch(70.7% .165 254.624);--color-blue-500:oklch(62.3% .214 259.815);--color-blue-600:oklch(54.6% .245 262.881);--color-blue-700:oklch(48.8% .243 264.376);--color-blue-800:oklch(42.4% .199 265.638);--color-blue-900:oklch(37.9% .146 265.522);--color-indigo-300:oklch(78.5% .115 274.713);--color-indigo-400:oklch(67.3% .182 276.935);--color-indigo-500:oklch(58.5% .233 277.117);--color-indigo-600:oklch(51.1% .262 276.966);--color-indigo-900:oklch(35.9% .144 278.697);--color-purple-50:oklch(97.7% .014 308.299);--color-purple-400:oklch(71.4% .203 305.504);--color-purple-500:oklch(62.7% .265 303.9);--color-purple-600:oklch(55.8% .288 302.321);--color-purple-700:oklch(49.6% .265 301.924);--color-gray-50:oklch(98.5% .002 247.839);--color-gray-100:oklch(96.7% .003 264.542);--color-gray-200:oklch(92.8% .006 264.531);--color-gray-300:oklch(87.2% .01 258.338);--color-gray-400:oklch(70.7% .022 261.325);--color-gray-500:oklch(55.1% .027 264.364);--color-gray-600:oklch(44.6% .03 256.802);--color-gray-700:oklch(37.3% .034 259.733);--color-gray-800:oklch(27.8% .033 256.848);--color-gray-900:oklch(21% .034 264.665);--color-black:#000;--color-white:#fff;--spacing:.25rem;--container-xs:20rem;--container-sm:24rem;--container-md:28rem;--container-xl:36rem;--container-2xl:42rem;--container-6xl:72rem;--container-7xl:80rem;--text-xs:.75rem;--text-xs--line-height:calc(1/.75);--text-sm:.875rem;--text-sm--line-height:calc(1.25/.875);--text-base:1rem;--text-base--line-height: 1.5 ;--text-lg:1.125rem;--text-lg--line-height:calc(1.75/1.125);--text-xl:1.25rem;--text-xl--line-height:calc(1.75/1.25);--text-2xl:1.5rem;--text-2xl--line-height:calc(2/1.5);--text-3xl:1.875rem;--text-3xl--line-height: 1.2 ;--font-weight-medium:500;--font-weight-semibold:600;--font-weight-bold:700;--font-weight-extrabold:800;--tracking-wide:.025em;--tracking-wider:.05em;--radius-md:.375rem;--radius-lg:.5rem;--ease-in-out:cubic-bezier(.4,0,.2,1);--animate-spin:spin 1s linear infinite;--animate-pulse:pulse 2s cubic-bezier(.4,0,.6,1)infinite;--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1);--default-font-family:var(--font-sans);--default-mono-font-family:var(--font-mono)}}@layer base{*,:after,:before,::backdrop{box-sizing:border-box;border:0 solid;margin:0;padding:0}::file-selector-button{box-sizing:border-box;border:0 
solid;margin:0;padding:0}html,:host{-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;line-height:1.5;font-family:var(--default-font-family,ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji");font-feature-settings:var(--default-font-feature-settings,normal);font-variation-settings:var(--default-font-variation-settings,normal);-webkit-tap-highlight-color:transparent}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:var(--default-mono-font-family,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace);font-feature-settings:var(--default-mono-font-feature-settings,normal);font-variation-settings:var(--default-mono-font-variation-settings,normal);font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}:-moz-focusring{outline:auto}progress{vertical-align:baseline}summary{display:list-item}ol,ul,menu{list-style:none}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}button,input,select,optgroup,textarea{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}::file-selector-button{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}:where(select:is([multiple],[size])) optgroup{font-weight:bolder}:where(select:is([multiple],[size])) optgroup option{padding-inline-start:20px}::file-selector-button{margin-inline-end:4px}::-moz-placeholder{opacity:1}::placeholder{opacity:1}@supports (not (-webkit-appearance:-apple-pay-button)) or (contain-intrinsic-size:1px){::-moz-placeholder{color:currentColor}::placeholder{color:currentColor}@supports (color:color-mix(in lab,red,red)){::-moz-placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}::placeholder{color:color-mix(in oklab,currentcolor 
50%,transparent)}}}textarea{resize:vertical}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-date-and-time-value{min-height:1lh;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-datetime-edit{padding-block:0}::-webkit-datetime-edit-year-field{padding-block:0}::-webkit-datetime-edit-month-field{padding-block:0}::-webkit-datetime-edit-day-field{padding-block:0}::-webkit-datetime-edit-hour-field{padding-block:0}::-webkit-datetime-edit-minute-field{padding-block:0}::-webkit-datetime-edit-second-field{padding-block:0}::-webkit-datetime-edit-millisecond-field{padding-block:0}::-webkit-datetime-edit-meridiem-field{padding-block:0}:-moz-ui-invalid{box-shadow:none}button,input:where([type=button],[type=reset],[type=submit]){-webkit-appearance:button;-moz-appearance:button;appearance:button}::file-selector-button{-webkit-appearance:button;-moz-appearance:button;appearance:button}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[hidden]:where(:not([hidden=until-found])){display:none!important}html{font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif}}@layer components;@layer utilities{.pointer-events-none{pointer-events:none}.invisible{visibility:hidden}.visible{visibility:visible}.sr-only{clip:rect(0,0,0,0);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.absolute{position:absolute}.fixed{position:fixed}.relative{position:relative}.static{position:static}.inset-0{inset:calc(var(--spacing)*0)}.inset-y-0{inset-block:calc(var(--spacing)*0)}.top-0{top:calc(var(--spacing)*0)}.top-1\/2{top:50%}.top-2{top:calc(var(--spacing)*2)}.top-4{top:calc(var(--spacing)*4)}.top-full{top:100%}.right-0{right:calc(var(--spacing)*0)}.right-2{right:calc(var(--spacing)*2)}.right-4{right:calc(var(--spacing)*4)}.right-full{right:100%}.bottom-full{bottom:100%}.left-0{left:calc(var(--spacing)*0)}.left-1\/2{left:50%}.left-full{left:100%}.z-0{z-index:0}.z-10{z-index:10}.z-40{z-index:40}.z-50{z-index:50}.z-\[60\]{z-index:60}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media 
(min-width:1536px){.container{max-width:1536px}}.mx-1{margin-inline:calc(var(--spacing)*1)}.mx-2{margin-inline:calc(var(--spacing)*2)}.mx-4{margin-inline:calc(var(--spacing)*4)}.mx-auto{margin-inline:auto}.mt-0\.5{margin-top:calc(var(--spacing)*.5)}.mt-1{margin-top:calc(var(--spacing)*1)}.mt-2{margin-top:calc(var(--spacing)*2)}.mt-3{margin-top:calc(var(--spacing)*3)}.mt-4{margin-top:calc(var(--spacing)*4)}.mt-5{margin-top:calc(var(--spacing)*5)}.mt-6{margin-top:calc(var(--spacing)*6)}.mt-8{margin-top:calc(var(--spacing)*8)}.-mr-0\.5{margin-right:calc(var(--spacing)*-.5)}.-mr-1{margin-right:calc(var(--spacing)*-1)}.-mr-12{margin-right:calc(var(--spacing)*-12)}.mr-2{margin-right:calc(var(--spacing)*2)}.mr-2\.5{margin-right:calc(var(--spacing)*2.5)}.mr-3{margin-right:calc(var(--spacing)*3)}.mr-4{margin-right:calc(var(--spacing)*4)}.mb-1{margin-bottom:calc(var(--spacing)*1)}.mb-2{margin-bottom:calc(var(--spacing)*2)}.mb-3{margin-bottom:calc(var(--spacing)*3)}.mb-4{margin-bottom:calc(var(--spacing)*4)}.mb-6{margin-bottom:calc(var(--spacing)*6)}.-ml-0\.5{margin-left:calc(var(--spacing)*-.5)}.-ml-1{margin-left:calc(var(--spacing)*-1)}.ml-1{margin-left:calc(var(--spacing)*1)}.ml-2{margin-left:calc(var(--spacing)*2)}.ml-3{margin-left:calc(var(--spacing)*3)}.ml-4{margin-left:calc(var(--spacing)*4)}.ml-5{margin-left:calc(var(--spacing)*5)}.ml-6{margin-left:calc(var(--spacing)*6)}.block{display:block}.contents{display:contents}.flex{display:flex}.grid{display:grid}.hidden{display:none}.inline{display:inline}.inline-block{display:inline-block}.inline-flex{display:inline-flex}.table{display:table}.h-0{height:calc(var(--spacing)*0)}.h-2{height:calc(var(--spacing)*2)}.h-2\.5{height:calc(var(--spacing)*2.5)}.h-3{height:calc(var(--spacing)*3)}.h-4{height:calc(var(--spacing)*4)}.h-5{height:calc(var(--spacing)*5)}.h-6{height:calc(var(--spacing)*6)}.h-8{height:calc(var(--spacing)*8)}.h-10{height:calc(var(--spacing)*10)}.h-12{height:calc(var(--spacing)*12)}.h-16{height:calc(var(--spacing)*16)}.h-24{height:calc(var(--spacing)*24)}.h-48{height:calc(var(--spacing)*48)}.h-full{height:100%}.max-h-96{max-height:calc(var(--spacing)*96)}.max-h-\[90vh\]{max-height:90vh}.max-h-screen{max-height:100vh}.min-h-0{min-height:calc(var(--spacing)*0)}.min-h-\[38px\]{min-height:38px}.min-h-screen{min-height:100vh}.w-0{width:calc(var(--spacing)*0)}.w-2{width:calc(var(--spacing)*2)}.w-2\.5{width:calc(var(--spacing)*2.5)}.w-3{width:calc(var(--spacing)*3)}.w-4{width:calc(var(--spacing)*4)}.w-5{width:calc(var(--spacing)*5)}.w-6{width:calc(var(--spacing)*6)}.w-8{width:calc(var(--spacing)*8)}.w-10{width:calc(var(--spacing)*10)}.w-12{width:calc(var(--spacing)*12)}.w-16{width:calc(var(--spacing)*16)}.w-20{width:calc(var(--spacing)*20)}.w-64{width:calc(var(--spacing)*64)}.w-80{width:calc(var(--spacing)*80)}.w-auto{width:auto}.w-full{width:100%}.max-w-2xl{max-width:var(--container-2xl)}.max-w-6xl{max-width:var(--container-6xl)}.max-w-7xl{max-width:var(--container-7xl)}.max-w-full{max-width:100%}.max-w-md{max-width:var(--container-md)}.max-w-sm{max-width:var(--container-sm)}.max-w-xl{max-width:var(--container-xl)}.max-w-xs{max-width:var(--container-xs)}.min-w-0{min-width:calc(var(--spacing)*0)}.flex-1{flex:1}.flex-shrink-0{flex-shrink:0}.-translate-x-1\/2{--tw-translate-x: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.-translate-y-1\/2{--tw-translate-y: -50% 
;translate:var(--tw-translate-x)var(--tw-translate-y)}.rotate-90{rotate:90deg}.transform{transform:var(--tw-rotate-x,)var(--tw-rotate-y,)var(--tw-rotate-z,)var(--tw-skew-x,)var(--tw-skew-y,)}.animate-pulse{animation:var(--animate-pulse)}.animate-spin{animation:var(--animate-spin)}.cursor-default{cursor:default}.cursor-help{cursor:help}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.resize-none{resize:none}.list-inside{list-style-position:inside}.list-disc{list-style-type:disc}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-center{align-items:center}.items-start{align-items:flex-start}.justify-between{justify-content:space-between}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.gap-0{gap:calc(var(--spacing)*0)}.gap-2{gap:calc(var(--spacing)*2)}.gap-4{gap:calc(var(--spacing)*4)}.gap-5{gap:calc(var(--spacing)*5)}.gap-6{gap:calc(var(--spacing)*6)}:where(.-space-y-px>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(-1px*var(--tw-space-y-reverse));margin-block-end:calc(-1px*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-1>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*1)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-2>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*2)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-3>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*3)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-4>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*4)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-6>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*6)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*6)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-8>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*8)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*8)*calc(1 - var(--tw-space-y-reverse)))}.gap-x-4{-moz-column-gap:calc(var(--spacing)*4);column-gap:calc(var(--spacing)*4)}:where(.-space-x-px>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(-1px*var(--tw-space-x-reverse));margin-inline-end:calc(-1px*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-1>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*1)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-2>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*2)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - 
var(--tw-space-x-reverse)))}:where(.space-x-4>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*4)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-x-reverse)))}.gap-y-6{row-gap:calc(var(--spacing)*6)}:where(.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(1px*var(--tw-divide-y-reverse));border-bottom-width:calc(1px*calc(1 - var(--tw-divide-y-reverse)))}:where(.divide-gray-200>:not(:last-child)){border-color:var(--color-gray-200)}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.scroll-smooth{scroll-behavior:smooth}.rounded{border-radius:.25rem}.rounded-full{border-radius:3.40282e38px}.rounded-lg{border-radius:var(--radius-lg)}.rounded-md{border-radius:var(--radius-md)}.rounded-none{border-radius:0}.rounded-t-md{border-top-left-radius:var(--radius-md);border-top-right-radius:var(--radius-md)}.rounded-l-md{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.rounded-l-none{border-top-left-radius:0;border-bottom-left-radius:0}.rounded-r-md{border-top-right-radius:var(--radius-md);border-bottom-right-radius:var(--radius-md)}.rounded-r-none{border-top-right-radius:0;border-bottom-right-radius:0}.rounded-b-md{border-bottom-right-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.border{border-style:var(--tw-border-style);border-width:1px}.border-2{border-style:var(--tw-border-style);border-width:2px}.border-4{border-style:var(--tw-border-style);border-width:4px}.border-t{border-top-style:var(--tw-border-style);border-top-width:1px}.border-r{border-right-style:var(--tw-border-style);border-right-width:1px}.border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.border-b-2{border-bottom-style:var(--tw-border-style);border-bottom-width:2px}.border-l-0{border-left-style:var(--tw-border-style);border-left-width:0}.border-dashed{--tw-border-style:dashed;border-style:dashed}.border-blue-200{border-color:var(--color-blue-200)}.border-blue-500{border-color:var(--color-blue-500)}.border-blue-600{border-color:var(--color-blue-600)}.border-gray-100{border-color:var(--color-gray-100)}.border-gray-200{border-color:var(--color-gray-200)}.border-gray-300{border-color:var(--color-gray-300)}.border-green-200{border-color:var(--color-green-200)}.border-red-200{border-color:var(--color-red-200)}.border-red-300{border-color:var(--color-red-300)}.border-transparent{border-color:#0000}.border-white{border-color:var(--color-white)}.border-yellow-200{border-color:var(--color-yellow-200)}.border-t-gray-900{border-top-color:var(--color-gray-900)}.border-r-gray-900{border-right-color:var(--color-gray-900)}.border-b-gray-900{border-bottom-color:var(--color-gray-900)}.border-l-gray-900{border-left-color:var(--color-gray-900)}.bg-black\/30{background-color:#0000004d}@supports (color:color-mix(in lab,red,red)){.bg-black\/30{background-color:color-mix(in 
oklab,var(--color-black)30%,transparent)}}.bg-blue-50{background-color:var(--color-blue-50)}.bg-blue-100{background-color:var(--color-blue-100)}.bg-blue-500{background-color:var(--color-blue-500)}.bg-blue-600{background-color:var(--color-blue-600)}.bg-gray-50{background-color:var(--color-gray-50)}.bg-gray-100{background-color:var(--color-gray-100)}.bg-gray-200{background-color:var(--color-gray-200)}.bg-gray-400{background-color:var(--color-gray-400)}.bg-gray-500{background-color:var(--color-gray-500)}.bg-gray-900{background-color:var(--color-gray-900)}.bg-green-50{background-color:var(--color-green-50)}.bg-green-100{background-color:var(--color-green-100)}.bg-green-500{background-color:var(--color-green-500)}.bg-orange-50{background-color:var(--color-orange-50)}.bg-purple-50{background-color:var(--color-purple-50)}.bg-purple-500{background-color:var(--color-purple-500)}.bg-red-50{background-color:var(--color-red-50)}.bg-red-100{background-color:var(--color-red-100)}.bg-red-500{background-color:var(--color-red-500)}.bg-red-600{background-color:var(--color-red-600)}.bg-red-900{background-color:var(--color-red-900)}.bg-white{background-color:var(--color-white)}.bg-yellow-50{background-color:var(--color-yellow-50)}.bg-yellow-100{background-color:var(--color-yellow-100)}.bg-yellow-500{background-color:var(--color-yellow-500)}.bg-gradient-to-r{--tw-gradient-position:to right in oklab;background-image:linear-gradient(var(--tw-gradient-stops))}.from-gray-50{--tw-gradient-from:var(--color-gray-50);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.to-white{--tw-gradient-to:var(--color-white);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.p-1{padding:calc(var(--spacing)*1)}.p-2{padding:calc(var(--spacing)*2)}.p-3{padding:calc(var(--spacing)*3)}.p-4{padding:calc(var(--spacing)*4)}.p-5{padding:calc(var(--spacing)*5)}.p-6{padding:calc(var(--spacing)*6)}.px-2{padding-inline:calc(var(--spacing)*2)}.px-2\.5{padding-inline:calc(var(--spacing)*2.5)}.px-3{padding-inline:calc(var(--spacing)*3)}.px-4{padding-inline:calc(var(--spacing)*4)}.px-6{padding-inline:calc(var(--spacing)*6)}.py-0\.5{padding-block:calc(var(--spacing)*.5)}.py-1{padding-block:calc(var(--spacing)*1)}.py-2{padding-block:calc(var(--spacing)*2)}.py-3{padding-block:calc(var(--spacing)*3)}.py-4{padding-block:calc(var(--spacing)*4)}.py-5{padding-block:calc(var(--spacing)*5)}.py-6{padding-block:calc(var(--spacing)*6)}.py-8{padding-block:calc(var(--spacing)*8)}.py-12{padding-block:calc(var(--spacing)*12)}.pt-2{padding-top:calc(var(--spacing)*2)}.pt-4{padding-top:calc(var(--spacing)*4)}.pt-5{padding-top:calc(var(--spacing)*5)}.pt-6{padding-top:calc(var(--spacing)*6)}.pt-20{padding-top:calc(var(--spacing)*20)}.pr-3{padding-right:calc(var(--spacing)*3)}.pr-8{padding-right:calc(var(--spacing)*8)}.pb-2{padding-bottom:calc(var(--spacing)*2)}.pb-4{padding-bottom:calc(var(--spacing)*4)}.pl-2{padding-left:calc(var(--spacing)*2)}.pl-3{padding-left:calc(var(--spacing)*3)}.pl-10{padding-left:calc(var(--spacing)*10)}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.font-mono{font-family:var(--font-mono)}.text-2xl{font-size:var(--text-2xl);line-height:var(--tw-leading,var(--text-2xl--line-height))}.text-3xl{font-size:var(--text-3xl);line-height:var(--tw-lea
ding,var(--text-3xl--line-height))}.text-base{font-size:var(--text-base);line-height:var(--tw-leading,var(--text-base--line-height))}.text-lg{font-size:var(--text-lg);line-height:var(--tw-leading,var(--text-lg--line-height))}.text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}.text-xl{font-size:var(--text-xl);line-height:var(--tw-leading,var(--text-xl--line-height))}.text-xs{font-size:var(--text-xs);line-height:var(--tw-leading,var(--text-xs--line-height))}.leading-5{--tw-leading:calc(var(--spacing)*5);line-height:calc(var(--spacing)*5)}.leading-6{--tw-leading:calc(var(--spacing)*6);line-height:calc(var(--spacing)*6)}.font-bold{--tw-font-weight:var(--font-weight-bold);font-weight:var(--font-weight-bold)}.font-extrabold{--tw-font-weight:var(--font-weight-extrabold);font-weight:var(--font-weight-extrabold)}.font-medium{--tw-font-weight:var(--font-weight-medium);font-weight:var(--font-weight-medium)}.font-semibold{--tw-font-weight:var(--font-weight-semibold);font-weight:var(--font-weight-semibold)}.tracking-wide{--tw-tracking:var(--tracking-wide);letter-spacing:var(--tracking-wide)}.tracking-wider{--tw-tracking:var(--tracking-wider);letter-spacing:var(--tracking-wider)}.break-all{word-break:break-all}.text-black{color:var(--color-black)}.text-blue-400{color:var(--color-blue-400)}.text-blue-600{color:var(--color-blue-600)}.text-blue-700{color:var(--color-blue-700)}.text-blue-800{color:var(--color-blue-800)}.text-gray-300{color:var(--color-gray-300)}.text-gray-400{color:var(--color-gray-400)}.text-gray-500{color:var(--color-gray-500)}.text-gray-600{color:var(--color-gray-600)}.text-gray-700{color:var(--color-gray-700)}.text-gray-800{color:var(--color-gray-800)}.text-gray-900{color:var(--color-gray-900)}.text-green-400{color:var(--color-green-400)}.text-green-500{color:var(--color-green-500)}.text-green-600{color:var(--color-green-600)}.text-green-700{color:var(--color-green-700)}.text-green-800{color:var(--color-green-800)}.text-indigo-600{color:var(--color-indigo-600)}.text-orange-700{color:var(--color-orange-700)}.text-purple-600{color:var(--color-purple-600)}.text-purple-700{color:var(--color-purple-700)}.text-red-400{color:var(--color-red-400)}.text-red-500{color:var(--color-red-500)}.text-red-600{color:var(--color-red-600)}.text-red-700{color:var(--color-red-700)}.text-red-800{color:var(--color-red-800)}.text-red-900{color:var(--color-red-900)}.text-white{color:var(--color-white)}.text-yellow-400{color:var(--color-yellow-400)}.text-yellow-600{color:var(--color-yellow-600)}.text-yellow-700{color:var(--color-yellow-700)}.text-yellow-800{color:var(--color-yellow-800)}.capitalize{text-transform:capitalize}.uppercase{text-transform:uppercase}.italic{font-style:italic}.placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.placeholder-gray-400::placeholder{color:var(--color-gray-400)}.placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.placeholder-gray-500::placeholder{color:var(--color-gray-500)}.opacity-0{opacity:0}.opacity-25{opacity:.25}.opacity-50{opacity:.5}.opacity-75{opacity:.75}.shadow{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px var(--tw-shadow-color,#0000001a),0 4px 6px -4px 
var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-sm{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-xl{--tw-shadow:0 20px 25px -5px var(--tw-shadow-color,#0000001a),0 8px 10px -6px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring,.ring-1{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring-blue-200{--tw-ring-color:var(--color-blue-200)}.ring-blue-600\/20{--tw-ring-color:#155dfc33}@supports (color:color-mix(in lab,red,red)){.ring-blue-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-600)20%,transparent)}}.ring-gray-200{--tw-ring-color:var(--color-gray-200)}.ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.ring-gray-600\/20{--tw-ring-color:#4a556533}@supports (color:color-mix(in lab,red,red)){.ring-gray-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-600)20%,transparent)}}.ring-green-200{--tw-ring-color:var(--color-green-200)}.ring-green-600\/20{--tw-ring-color:#00a54433}@supports (color:color-mix(in lab,red,red)){.ring-green-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-600)20%,transparent)}}.ring-orange-600\/20{--tw-ring-color:#f0510033}@supports (color:color-mix(in lab,red,red)){.ring-orange-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-600)20%,transparent)}}.ring-purple-600\/20{--tw-ring-color:#9810fa33}@supports (color:color-mix(in lab,red,red)){.ring-purple-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-600)20%,transparent)}}.ring-red-200{--tw-ring-color:var(--color-red-200)}.ring-red-600\/20{--tw-ring-color:#e4001433}@supports (color:color-mix(in lab,red,red)){.ring-red-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-600)20%,transparent)}}.ring-yellow-200{--tw-ring-color:var(--color-yellow-200)}.ring-yellow-600\/20{--tw-ring-color:#cd890033}@supports (color:color-mix(in lab,red,red)){.ring-yellow-600\/20{--tw-ring-color:color-mix(in 
oklab,var(--color-yellow-600)20%,transparent)}}.filter{filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.transition{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to,opacity,box-shadow,transform,translate,scale,rotate,filter,-webkit-backdrop-filter,backdrop-filter,display,visibility,content-visibility,overlay,pointer-events;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-all{transition-property:all;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-shadow{transition-property:box-shadow;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.duration-200{--tw-duration:.2s;transition-duration:.2s}.duration-300{--tw-duration:.3s;transition-duration:.3s}.ease-in-out{--tw-ease:var(--ease-in-out);transition-timing-function:var(--ease-in-out)}.ring-inset{--tw-ring-inset:inset}@media (hover:hover){.group-hover\:visible:is(:where(.group):hover *){visibility:visible}.group-hover\:opacity-100:is(:where(.group):hover *){opacity:1}}.first\:rounded-l-md:first-child{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.first\:border-l:first-child{border-left-style:var(--tw-border-style);border-left-width:1px}@media 
(hover:hover){.hover\:scale-105:hover{--tw-scale-x:105%;--tw-scale-y:105%;--tw-scale-z:105%;scale:var(--tw-scale-x)var(--tw-scale-y)}.hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.hover\:border-gray-400:hover{border-color:var(--color-gray-400)}.hover\:bg-blue-200:hover{background-color:var(--color-blue-200)}.hover\:bg-blue-700:hover{background-color:var(--color-blue-700)}.hover\:bg-gray-50:hover{background-color:var(--color-gray-50)}.hover\:bg-gray-200:hover{background-color:var(--color-gray-200)}.hover\:bg-red-200:hover{background-color:var(--color-red-200)}.hover\:bg-red-700:hover{background-color:var(--color-red-700)}.hover\:text-blue-500:hover{color:var(--color-blue-500)}.hover\:text-blue-600:hover{color:var(--color-blue-600)}.hover\:text-gray-600:hover{color:var(--color-gray-600)}.hover\:text-gray-700:hover{color:var(--color-gray-700)}.hover\:text-gray-800:hover{color:var(--color-gray-800)}.hover\:text-gray-900:hover{color:var(--color-gray-900)}.hover\:text-green-500:hover{color:var(--color-green-500)}.hover\:text-green-900:hover{color:var(--color-green-900)}.hover\:text-indigo-900:hover{color:var(--color-indigo-900)}.hover\:text-red-500:hover{color:var(--color-red-500)}.hover\:text-red-900:hover{color:var(--color-red-900)}.hover\:text-yellow-300:hover{color:var(--color-yellow-300)}.hover\:text-yellow-500:hover{color:var(--color-yellow-500)}.hover\:underline:hover{text-decoration-line:underline}.hover\:shadow-md:hover{--tw-shadow:0 4px 6px -1px var(--tw-shadow-color,#0000001a),0 2px 4px -2px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.hover\:shadow-sm:hover{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}}.focus\:z-10:focus{z-index:10}.focus\:border-blue-500:focus{border-color:var(--color-blue-500)}.focus\:bg-red-200:focus{background-color:var(--color-red-200)}.focus\:placeholder-gray-400:focus::-moz-placeholder{color:var(--color-gray-400)}.focus\:placeholder-gray-400:focus::placeholder{color:var(--color-gray-400)}.focus\:ring-1:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-2:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-blue-500:focus{--tw-ring-color:var(--color-blue-500)}.focus\:ring-gray-500:focus{--tw-ring-color:var(--color-gray-500)}.focus\:ring-green-500:focus{--tw-ring-color:var(--color-green-500)}.focus\:ring-indigo-500:focus{--tw-ring-color:var(--color-indigo-500)}.focus\:ring-red-500:focus{--tw-ring-color:var(--color-red-500)}.focus\:ring-white:focus{--tw-ring-color:var(--color-white)}.focus\:ring-yellow-500:focus{--tw-ring-color:var(--color-yellow-500)}.focus\:ring-offset-2:focus{--tw-ring-offset-width:2px;--tw-ring-offset-shadow:var(--tw-ring-inset,)0 0 0 
var(--tw-ring-offset-width)var(--tw-ring-offset-color)}.focus\:outline-none:focus{--tw-outline-style:none;outline-style:none}.focus\:ring-inset:focus{--tw-ring-inset:inset}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:bg-gray-400:disabled{background-color:var(--color-gray-400)}.disabled\:opacity-50:disabled{opacity:.5}@media (hover:hover){.disabled\:hover\:bg-gray-400:disabled:hover{background-color:var(--color-gray-400)}}@media (min-width:640px){.sm\:mx-auto{margin-inline:auto}.sm\:mt-0{margin-top:calc(var(--spacing)*0)}.sm\:ml-4{margin-left:calc(var(--spacing)*4)}.sm\:block{display:block}.sm\:flex{display:flex}.sm\:hidden{display:none}.sm\:w-full{width:100%}.sm\:max-w-md{max-width:var(--container-md)}.sm\:flex-1{flex:1}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:items-center{align-items:center}.sm\:justify-between{justify-content:space-between}.sm\:rounded-lg{border-radius:var(--radius-lg)}.sm\:p-6{padding:calc(var(--spacing)*6)}.sm\:px-6{padding-inline:calc(var(--spacing)*6)}.sm\:px-10{padding-inline:calc(var(--spacing)*10)}.sm\:text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}}@media (min-width:768px){.md\:ml-2{margin-left:calc(var(--spacing)*2)}.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}:where(.md\:space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}}@media (min-width:1024px){.lg\:fixed{position:fixed}.lg\:inset-y-0{inset-block:calc(var(--spacing)*0)}.lg\:flex{display:flex}.lg\:hidden{display:none}.lg\:w-64{width:calc(var(--spacing)*64)}.lg\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.lg\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.lg\:flex-col{flex-direction:column}.lg\:px-8{padding-inline:calc(var(--spacing)*8)}.lg\:pt-6{padding-top:calc(var(--spacing)*6)}.lg\:pl-64{padding-left:calc(var(--spacing)*64)}}.dark .dark\:block{display:block}.dark .dark\:hidden{display:none}:where(.dark .dark\:divide-gray-700>:not(:last-child)){border-color:var(--color-gray-700)}.dark .dark\:border-blue-700{border-color:var(--color-blue-700)}.dark .dark\:border-blue-800{border-color:var(--color-blue-800)}.dark .dark\:border-gray-600{border-color:var(--color-gray-600)}.dark .dark\:border-gray-700{border-color:var(--color-gray-700)}.dark .dark\:border-green-700{border-color:var(--color-green-700)}.dark .dark\:border-red-600{border-color:var(--color-red-600)}.dark .dark\:border-red-700{border-color:var(--color-red-700)}.dark .dark\:border-red-800{border-color:var(--color-red-800)}.dark .dark\:border-yellow-700{border-color:var(--color-yellow-700)}.dark .dark\:border-yellow-800{border-color:var(--color-yellow-800)}.dark .dark\:bg-black\/50{background-color:#00000080}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-black\/50{background-color:color-mix(in oklab,var(--color-black)50%,transparent)}}.dark .dark\:bg-blue-500\/10{background-color:#3080ff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-500\/10{background-color:color-mix(in oklab,var(--color-blue-500)10%,transparent)}}.dark .dark\:bg-blue-900{background-color:var(--color-blue-900)}.dark .dark\:bg-blue-900\/20{background-color:#1c398e33}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:bg-blue-900\/20{background-color:color-mix(in oklab,var(--color-blue-900)20%,transparent)}}.dark .dark\:bg-blue-900\/50{background-color:#1c398e80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-900\/50{background-color:color-mix(in oklab,var(--color-blue-900)50%,transparent)}}.dark .dark\:bg-gray-500\/10{background-color:#6a72821a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-500\/10{background-color:color-mix(in oklab,var(--color-gray-500)10%,transparent)}}.dark .dark\:bg-gray-600{background-color:var(--color-gray-600)}.dark .dark\:bg-gray-700{background-color:var(--color-gray-700)}.dark .dark\:bg-gray-800{background-color:var(--color-gray-800)}.dark .dark\:bg-gray-800\/50{background-color:#1e293980}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-800\/50{background-color:color-mix(in oklab,var(--color-gray-800)50%,transparent)}}.dark .dark\:bg-gray-900{background-color:var(--color-gray-900)}.dark .dark\:bg-gray-900\/50{background-color:#10182880}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-900\/50{background-color:color-mix(in oklab,var(--color-gray-900)50%,transparent)}}.dark .dark\:bg-green-500\/10{background-color:#00c7581a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-500\/10{background-color:color-mix(in oklab,var(--color-green-500)10%,transparent)}}.dark .dark\:bg-green-900{background-color:var(--color-green-900)}.dark .dark\:bg-green-900\/50{background-color:#0d542b80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-900\/50{background-color:color-mix(in oklab,var(--color-green-900)50%,transparent)}}.dark .dark\:bg-orange-500\/10{background-color:#fe6e001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-orange-500\/10{background-color:color-mix(in oklab,var(--color-orange-500)10%,transparent)}}.dark .dark\:bg-purple-500\/10{background-color:#ac4bff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-purple-500\/10{background-color:color-mix(in oklab,var(--color-purple-500)10%,transparent)}}.dark .dark\:bg-red-500\/10{background-color:#fb2c361a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-500\/10{background-color:color-mix(in oklab,var(--color-red-500)10%,transparent)}}.dark .dark\:bg-red-700{background-color:var(--color-red-700)}.dark .dark\:bg-red-800{background-color:var(--color-red-800)}.dark .dark\:bg-red-900{background-color:var(--color-red-900)}.dark .dark\:bg-red-900\/20{background-color:#82181a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/20{background-color:color-mix(in oklab,var(--color-red-900)20%,transparent)}}.dark .dark\:bg-red-900\/50{background-color:#82181a80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/50{background-color:color-mix(in oklab,var(--color-red-900)50%,transparent)}}.dark .dark\:bg-yellow-500\/10{background-color:#edb2001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-500\/10{background-color:color-mix(in oklab,var(--color-yellow-500)10%,transparent)}}.dark .dark\:bg-yellow-900{background-color:var(--color-yellow-900)}.dark .dark\:bg-yellow-900\/20{background-color:#733e0a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-900\/20{background-color:color-mix(in oklab,var(--color-yellow-900)20%,transparent)}}.dark 
.dark\:from-gray-800{--tw-gradient-from:var(--color-gray-800);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:to-gray-700{--tw-gradient-to:var(--color-gray-700);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:text-blue-200{color:var(--color-blue-200)}.dark .dark\:text-blue-300{color:var(--color-blue-300)}.dark .dark\:text-blue-400{color:var(--color-blue-400)}.dark .dark\:text-gray-200{color:var(--color-gray-200)}.dark .dark\:text-gray-300{color:var(--color-gray-300)}.dark .dark\:text-gray-400{color:var(--color-gray-400)}.dark .dark\:text-gray-500{color:var(--color-gray-500)}.dark .dark\:text-green-200{color:var(--color-green-200)}.dark .dark\:text-green-300{color:var(--color-green-300)}.dark .dark\:text-green-400{color:var(--color-green-400)}.dark .dark\:text-indigo-400{color:var(--color-indigo-400)}.dark .dark\:text-orange-400{color:var(--color-orange-400)}.dark .dark\:text-purple-400{color:var(--color-purple-400)}.dark .dark\:text-red-100{color:var(--color-red-100)}.dark .dark\:text-red-200{color:var(--color-red-200)}.dark .dark\:text-red-300{color:var(--color-red-300)}.dark .dark\:text-red-400{color:var(--color-red-400)}.dark .dark\:text-white{color:var(--color-white)}.dark .dark\:text-yellow-200{color:var(--color-yellow-200)}.dark .dark\:text-yellow-300{color:var(--color-yellow-300)}.dark .dark\:text-yellow-400{color:var(--color-yellow-400)}.dark .dark\:placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-400::placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.dark .dark\:placeholder-gray-500::placeholder{color:var(--color-gray-500)}.dark .dark\:ring-blue-400\/20{--tw-ring-color:#54a2ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)20%,transparent)}}.dark .dark\:ring-blue-400\/30{--tw-ring-color:#54a2ff4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)30%,transparent)}}.dark .dark\:ring-blue-500\/20{--tw-ring-color:#3080ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-500)20%,transparent)}}.dark .dark\:ring-gray-400\/20{--tw-ring-color:#99a1af33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)20%,transparent)}}.dark .dark\:ring-gray-400\/30{--tw-ring-color:#99a1af4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)30%,transparent)}}.dark .dark\:ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.dark .dark\:ring-green-400\/20{--tw-ring-color:#05df7233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-400)20%,transparent)}}.dark .dark\:ring-green-400\/30{--tw-ring-color:#05df724d}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:ring-green-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-green-400)30%,transparent)}}.dark .dark\:ring-green-500\/20{--tw-ring-color:#00c75833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-500)20%,transparent)}}.dark .dark\:ring-orange-500\/20{--tw-ring-color:#fe6e0033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-orange-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-500)20%,transparent)}}.dark .dark\:ring-purple-500\/20{--tw-ring-color:#ac4bff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-purple-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-500)20%,transparent)}}.dark .dark\:ring-red-400\/20{--tw-ring-color:#ff656833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-400)20%,transparent)}}.dark .dark\:ring-red-400\/30{--tw-ring-color:#ff65684d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-red-400)30%,transparent)}}.dark .dark\:ring-red-500\/20{--tw-ring-color:#fb2c3633}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-500)20%,transparent)}}.dark .dark\:ring-yellow-400\/30{--tw-ring-color:#fac8004d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-yellow-400)30%,transparent)}}.dark .dark\:ring-yellow-500\/20{--tw-ring-color:#edb20033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-500)20%,transparent)}}@media (hover:hover){.dark .dark\:hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.dark .dark\:hover\:border-gray-500:hover{border-color:var(--color-gray-500)}.dark .dark\:hover\:bg-blue-800:hover{background-color:var(--color-blue-800)}.dark .dark\:hover\:bg-gray-500:hover{background-color:var(--color-gray-500)}.dark .dark\:hover\:bg-gray-600:hover{background-color:var(--color-gray-600)}.dark .dark\:hover\:bg-gray-700:hover{background-color:var(--color-gray-700)}.dark .dark\:hover\:bg-gray-800:hover{background-color:var(--color-gray-800)}.dark .dark\:hover\:bg-red-700:hover{background-color:var(--color-red-700)}.dark .dark\:hover\:bg-red-800:hover{background-color:var(--color-red-800)}.dark .dark\:hover\:text-blue-300:hover{color:var(--color-blue-300)}.dark .dark\:hover\:text-gray-100:hover{color:var(--color-gray-100)}.dark .dark\:hover\:text-gray-300:hover{color:var(--color-gray-300)}.dark .dark\:hover\:text-green-300:hover{color:var(--color-green-300)}.dark .dark\:hover\:text-indigo-300:hover{color:var(--color-indigo-300)}.dark .dark\:hover\:text-red-300:hover{color:var(--color-red-300)}.dark .dark\:hover\:text-white:hover{color:var(--color-white)}}.dark .dark\:focus\:bg-red-700:focus{background-color:var(--color-red-700)}.dark .dark\:focus\:ring-offset-gray-900:focus{--tw-ring-offset-color:var(--color-gray-900)}}@property --tw-translate-x{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-y{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-z{syntax:"*";inherits:false;initial-value:0}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property 
--tw-skew-y{syntax:"*";inherits:false}@property --tw-space-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-space-x-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-gradient-position{syntax:"*";inherits:false}@property --tw-gradient-from{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-via{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-to{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-stops{syntax:"*";inherits:false}@property --tw-gradient-via-stops{syntax:"*";inherits:false}@property --tw-gradient-from-position{syntax:"";inherits:false;initial-value:0%}@property --tw-gradient-via-position{syntax:"";inherits:false;initial-value:50%}@property --tw-gradient-to-position{syntax:"";inherits:false;initial-value:100%}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-tracking{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-duration{syntax:"*";inherits:false}@property --tw-ease{syntax:"*";inherits:false}@property --tw-scale-x{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-y{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-z{syntax:"*";inherits:false;initial-value:1}@keyframes spin{to{transform:rotate(360deg)}}@keyframes pulse{50%{opacity:.5}} diff --git a/webapp/assets/_app/immutable/assets/_layout.srAxWR-A.css b/webapp/assets/_app/immutable/assets/_layout.srAxWR-A.css deleted file mode 100644 index c83b2922..00000000 --- a/webapp/assets/_app/immutable/assets/_layout.srAxWR-A.css +++ /dev/null @@ -1 +0,0 @@ -/*! 
tailwindcss v4.1.11 | MIT License | https://tailwindcss.com */@layer properties{@supports ((-webkit-hyphens:none) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){*,:before,:after,::backdrop{--tw-translate-x:0;--tw-translate-y:0;--tw-translate-z:0;--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-space-y-reverse:0;--tw-space-x-reverse:0;--tw-divide-y-reverse:0;--tw-border-style:solid;--tw-gradient-position:initial;--tw-gradient-from:#0000;--tw-gradient-via:#0000;--tw-gradient-to:#0000;--tw-gradient-stops:initial;--tw-gradient-via-stops:initial;--tw-gradient-from-position:0%;--tw-gradient-via-position:50%;--tw-gradient-to-position:100%;--tw-leading:initial;--tw-font-weight:initial;--tw-tracking:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-duration:initial;--tw-ease:initial;--tw-scale-x:1;--tw-scale-y:1;--tw-scale-z:1}}}@layer theme{:root,:host{--font-sans:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--font-mono:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--color-red-50:oklch(97.1% .013 17.38);--color-red-100:oklch(93.6% .032 17.717);--color-red-200:oklch(88.5% .062 18.334);--color-red-300:oklch(80.8% .114 19.571);--color-red-400:oklch(70.4% .191 22.216);--color-red-500:oklch(63.7% .237 25.331);--color-red-600:oklch(57.7% .245 27.325);--color-red-700:oklch(50.5% .213 27.518);--color-red-800:oklch(44.4% .177 26.899);--color-red-900:oklch(39.6% .141 25.723);--color-orange-50:oklch(98% .016 73.684);--color-orange-400:oklch(75% .183 55.934);--color-orange-500:oklch(70.5% .213 47.604);--color-orange-600:oklch(64.6% .222 41.116);--color-orange-700:oklch(55.3% .195 38.402);--color-yellow-50:oklch(98.7% .026 102.212);--color-yellow-100:oklch(97.3% .071 103.193);--color-yellow-200:oklch(94.5% .129 101.54);--color-yellow-300:oklch(90.5% .182 98.111);--color-yellow-400:oklch(85.2% .199 91.936);--color-yellow-500:oklch(79.5% .184 86.047);--color-yellow-600:oklch(68.1% .162 75.834);--color-yellow-700:oklch(55.4% .135 66.442);--color-yellow-800:oklch(47.6% .114 61.907);--color-yellow-900:oklch(42.1% .095 57.708);--color-green-50:oklch(98.2% .018 155.826);--color-green-100:oklch(96.2% .044 156.743);--color-green-200:oklch(92.5% .084 155.995);--color-green-300:oklch(87.1% .15 154.449);--color-green-400:oklch(79.2% .209 151.711);--color-green-500:oklch(72.3% .219 149.579);--color-green-600:oklch(62.7% .194 149.214);--color-green-700:oklch(52.7% .154 150.069);--color-green-800:oklch(44.8% .119 151.328);--color-green-900:oklch(39.3% .095 152.535);--color-blue-50:oklch(97% .014 254.604);--color-blue-100:oklch(93.2% .032 255.585);--color-blue-200:oklch(88.2% .059 254.128);--color-blue-300:oklch(80.9% .105 251.813);--color-blue-400:oklch(70.7% .165 
254.624);--color-blue-500:oklch(62.3% .214 259.815);--color-blue-600:oklch(54.6% .245 262.881);--color-blue-700:oklch(48.8% .243 264.376);--color-blue-800:oklch(42.4% .199 265.638);--color-blue-900:oklch(37.9% .146 265.522);--color-indigo-300:oklch(78.5% .115 274.713);--color-indigo-400:oklch(67.3% .182 276.935);--color-indigo-500:oklch(58.5% .233 277.117);--color-indigo-600:oklch(51.1% .262 276.966);--color-indigo-900:oklch(35.9% .144 278.697);--color-purple-50:oklch(97.7% .014 308.299);--color-purple-400:oklch(71.4% .203 305.504);--color-purple-500:oklch(62.7% .265 303.9);--color-purple-600:oklch(55.8% .288 302.321);--color-purple-700:oklch(49.6% .265 301.924);--color-gray-50:oklch(98.5% .002 247.839);--color-gray-100:oklch(96.7% .003 264.542);--color-gray-200:oklch(92.8% .006 264.531);--color-gray-300:oklch(87.2% .01 258.338);--color-gray-400:oklch(70.7% .022 261.325);--color-gray-500:oklch(55.1% .027 264.364);--color-gray-600:oklch(44.6% .03 256.802);--color-gray-700:oklch(37.3% .034 259.733);--color-gray-800:oklch(27.8% .033 256.848);--color-gray-900:oklch(21% .034 264.665);--color-black:#000;--color-white:#fff;--spacing:.25rem;--container-xs:20rem;--container-sm:24rem;--container-md:28rem;--container-xl:36rem;--container-2xl:42rem;--container-6xl:72rem;--container-7xl:80rem;--text-xs:.75rem;--text-xs--line-height:calc(1/.75);--text-sm:.875rem;--text-sm--line-height:calc(1.25/.875);--text-base:1rem;--text-base--line-height: 1.5 ;--text-lg:1.125rem;--text-lg--line-height:calc(1.75/1.125);--text-xl:1.25rem;--text-xl--line-height:calc(1.75/1.25);--text-2xl:1.5rem;--text-2xl--line-height:calc(2/1.5);--text-3xl:1.875rem;--text-3xl--line-height: 1.2 ;--font-weight-medium:500;--font-weight-semibold:600;--font-weight-bold:700;--font-weight-extrabold:800;--tracking-wide:.025em;--tracking-wider:.05em;--radius-md:.375rem;--radius-lg:.5rem;--ease-in-out:cubic-bezier(.4,0,.2,1);--animate-spin:spin 1s linear infinite;--animate-pulse:pulse 2s cubic-bezier(.4,0,.6,1)infinite;--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1);--default-font-family:var(--font-sans);--default-mono-font-family:var(--font-mono)}}@layer base{*,:after,:before,::backdrop{box-sizing:border-box;border:0 solid;margin:0;padding:0}::file-selector-button{box-sizing:border-box;border:0 solid;margin:0;padding:0}html,:host{-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;line-height:1.5;font-family:var(--default-font-family,ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji");font-feature-settings:var(--default-font-feature-settings,normal);font-variation-settings:var(--default-font-variation-settings,normal);-webkit-tap-highlight-color:transparent}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:var(--default-mono-font-family,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier 
New",monospace);font-feature-settings:var(--default-mono-font-feature-settings,normal);font-variation-settings:var(--default-mono-font-variation-settings,normal);font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}:-moz-focusring{outline:auto}progress{vertical-align:baseline}summary{display:list-item}ol,ul,menu{list-style:none}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}button,input,select,optgroup,textarea{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}::file-selector-button{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}:where(select:is([multiple],[size])) optgroup{font-weight:bolder}:where(select:is([multiple],[size])) optgroup option{padding-inline-start:20px}::file-selector-button{margin-inline-end:4px}::-moz-placeholder{opacity:1}::placeholder{opacity:1}@supports (not (-webkit-appearance:-apple-pay-button)) or (contain-intrinsic-size:1px){::-moz-placeholder{color:currentColor}::placeholder{color:currentColor}@supports (color:color-mix(in lab,red,red)){::-moz-placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}::placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}}}textarea{resize:vertical}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-date-and-time-value{min-height:1lh;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-datetime-edit{padding-block:0}::-webkit-datetime-edit-year-field{padding-block:0}::-webkit-datetime-edit-month-field{padding-block:0}::-webkit-datetime-edit-day-field{padding-block:0}::-webkit-datetime-edit-hour-field{padding-block:0}::-webkit-datetime-edit-minute-field{padding-block:0}::-webkit-datetime-edit-second-field{padding-block:0}::-webkit-datetime-edit-millisecond-field{padding-block:0}::-webkit-datetime-edit-meridiem-field{padding-block:0}:-moz-ui-invalid{box-shadow:none}button,input:where([type=button],[type=reset],[type=submit]){-webkit-appearance:button;-moz-appearance:button;appearance:button}::file-selector-button{-webkit-appearance:button;-moz-appearance:button;appearance:button}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[hidden]:where(:not([hidden=until-found])){display:none!important}html{font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif}}@layer components;@layer 
utilities{.pointer-events-none{pointer-events:none}.invisible{visibility:hidden}.visible{visibility:visible}.sr-only{clip:rect(0,0,0,0);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.absolute{position:absolute}.fixed{position:fixed}.relative{position:relative}.static{position:static}.inset-0{inset:calc(var(--spacing)*0)}.inset-y-0{inset-block:calc(var(--spacing)*0)}.top-0{top:calc(var(--spacing)*0)}.top-1\/2{top:50%}.top-2{top:calc(var(--spacing)*2)}.top-4{top:calc(var(--spacing)*4)}.top-full{top:100%}.right-0{right:calc(var(--spacing)*0)}.right-2{right:calc(var(--spacing)*2)}.right-4{right:calc(var(--spacing)*4)}.right-full{right:100%}.bottom-full{bottom:100%}.left-0{left:calc(var(--spacing)*0)}.left-1\/2{left:50%}.left-full{left:100%}.z-0{z-index:0}.z-10{z-index:10}.z-40{z-index:40}.z-50{z-index:50}.z-\[60\]{z-index:60}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media (min-width:1536px){.container{max-width:1536px}}.mx-1{margin-inline:calc(var(--spacing)*1)}.mx-2{margin-inline:calc(var(--spacing)*2)}.mx-4{margin-inline:calc(var(--spacing)*4)}.mx-auto{margin-inline:auto}.mt-0\.5{margin-top:calc(var(--spacing)*.5)}.mt-1{margin-top:calc(var(--spacing)*1)}.mt-2{margin-top:calc(var(--spacing)*2)}.mt-3{margin-top:calc(var(--spacing)*3)}.mt-4{margin-top:calc(var(--spacing)*4)}.mt-5{margin-top:calc(var(--spacing)*5)}.mt-6{margin-top:calc(var(--spacing)*6)}.mt-8{margin-top:calc(var(--spacing)*8)}.-mr-0\.5{margin-right:calc(var(--spacing)*-.5)}.-mr-1{margin-right:calc(var(--spacing)*-1)}.-mr-12{margin-right:calc(var(--spacing)*-12)}.mr-2{margin-right:calc(var(--spacing)*2)}.mr-2\.5{margin-right:calc(var(--spacing)*2.5)}.mr-3{margin-right:calc(var(--spacing)*3)}.mr-4{margin-right:calc(var(--spacing)*4)}.mb-1{margin-bottom:calc(var(--spacing)*1)}.mb-2{margin-bottom:calc(var(--spacing)*2)}.mb-3{margin-bottom:calc(var(--spacing)*3)}.mb-4{margin-bottom:calc(var(--spacing)*4)}.mb-6{margin-bottom:calc(var(--spacing)*6)}.-ml-0\.5{margin-left:calc(var(--spacing)*-.5)}.-ml-1{margin-left:calc(var(--spacing)*-1)}.ml-1{margin-left:calc(var(--spacing)*1)}.ml-2{margin-left:calc(var(--spacing)*2)}.ml-3{margin-left:calc(var(--spacing)*3)}.ml-4{margin-left:calc(var(--spacing)*4)}.ml-5{margin-left:calc(var(--spacing)*5)}.ml-6{margin-left:calc(var(--spacing)*6)}.block{display:block}.contents{display:contents}.flex{display:flex}.grid{display:grid}.hidden{display:none}.inline{display:inline}.inline-block{display:inline-block}.inline-flex{display:inline-flex}.table{display:table}.h-0{height:calc(var(--spacing)*0)}.h-2{height:calc(var(--spacing)*2)}.h-2\.5{height:calc(var(--spacing)*2.5)}.h-3{height:calc(var(--spacing)*3)}.h-4{height:calc(var(--spacing)*4)}.h-5{height:calc(var(--spacing)*5)}.h-6{height:calc(var(--spacing)*6)}.h-8{height:calc(var(--spacing)*8)}.h-10{height:calc(var(--spacing)*10)}.h-12{height:calc(var(--spacing)*12)}.h-16{height:calc(var(--spacing)*16)}.h-24{height:calc(var(--spacing)*24)}.h-48{height:calc(var(--spacing)*48)}.h-full{height:100%}.max-h-96{max-height:calc(var(--spacing)*96)}.max-h-\[90vh\]{max-height:90vh}.max-h-screen{max-height:100vh}.min-h-0{min-height:calc(var(--spacing)*0)}.min-h-\[38px\]{min-height:38px}.min-h-screen{min-height:100vh}.w-0{width:calc(var(--spacing)*0)}.w-2{width:calc(var(--spacing)*2)}.w-2\.5{width:calc(var(--spaci
ng)*2.5)}.w-3{width:calc(var(--spacing)*3)}.w-4{width:calc(var(--spacing)*4)}.w-5{width:calc(var(--spacing)*5)}.w-6{width:calc(var(--spacing)*6)}.w-8{width:calc(var(--spacing)*8)}.w-10{width:calc(var(--spacing)*10)}.w-12{width:calc(var(--spacing)*12)}.w-16{width:calc(var(--spacing)*16)}.w-20{width:calc(var(--spacing)*20)}.w-64{width:calc(var(--spacing)*64)}.w-80{width:calc(var(--spacing)*80)}.w-auto{width:auto}.w-full{width:100%}.max-w-2xl{max-width:var(--container-2xl)}.max-w-6xl{max-width:var(--container-6xl)}.max-w-7xl{max-width:var(--container-7xl)}.max-w-full{max-width:100%}.max-w-md{max-width:var(--container-md)}.max-w-sm{max-width:var(--container-sm)}.max-w-xl{max-width:var(--container-xl)}.max-w-xs{max-width:var(--container-xs)}.min-w-0{min-width:calc(var(--spacing)*0)}.flex-1{flex:1}.flex-shrink-0{flex-shrink:0}.-translate-x-1\/2{--tw-translate-x: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.-translate-y-1\/2{--tw-translate-y: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.rotate-90{rotate:90deg}.transform{transform:var(--tw-rotate-x,)var(--tw-rotate-y,)var(--tw-rotate-z,)var(--tw-skew-x,)var(--tw-skew-y,)}.animate-pulse{animation:var(--animate-pulse)}.animate-spin{animation:var(--animate-spin)}.cursor-default{cursor:default}.cursor-help{cursor:help}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.resize-none{resize:none}.list-inside{list-style-position:inside}.list-disc{list-style-type:disc}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-center{align-items:center}.items-start{align-items:flex-start}.justify-between{justify-content:space-between}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.gap-0{gap:calc(var(--spacing)*0)}.gap-2{gap:calc(var(--spacing)*2)}.gap-4{gap:calc(var(--spacing)*4)}.gap-5{gap:calc(var(--spacing)*5)}.gap-6{gap:calc(var(--spacing)*6)}:where(.-space-y-px>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(-1px*var(--tw-space-y-reverse));margin-block-end:calc(-1px*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-1>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*1)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-2>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*2)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-3>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*3)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-4>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*4)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-6>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*6)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*6)*calc(1 - 
var(--tw-space-y-reverse)))}:where(.space-y-8>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*8)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*8)*calc(1 - var(--tw-space-y-reverse)))}.gap-x-4{-moz-column-gap:calc(var(--spacing)*4);column-gap:calc(var(--spacing)*4)}:where(.-space-x-px>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(-1px*var(--tw-space-x-reverse));margin-inline-end:calc(-1px*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-1>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*1)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-2>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*2)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-4>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*4)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-x-reverse)))}.gap-y-6{row-gap:calc(var(--spacing)*6)}:where(.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(1px*var(--tw-divide-y-reverse));border-bottom-width:calc(1px*calc(1 - var(--tw-divide-y-reverse)))}:where(.divide-gray-200>:not(:last-child)){border-color:var(--color-gray-200)}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.scroll-smooth{scroll-behavior:smooth}.rounded{border-radius:.25rem}.rounded-full{border-radius:3.40282e38px}.rounded-lg{border-radius:var(--radius-lg)}.rounded-md{border-radius:var(--radius-md)}.rounded-none{border-radius:0}.rounded-t-md{border-top-left-radius:var(--radius-md);border-top-right-radius:var(--radius-md)}.rounded-l-md{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.rounded-l-none{border-top-left-radius:0;border-bottom-left-radius:0}.rounded-r-md{border-top-right-radius:var(--radius-md);border-bottom-right-radius:var(--radius-md)}.rounded-r-none{border-top-right-radius:0;border-bottom-right-radius:0}.rounded-b-md{border-bottom-right-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.border{border-style:var(--tw-border-style);border-width:1px}.border-2{border-style:var(--tw-border-style);border-width:2px}.border-4{border-style:var(--tw-border-style);border-width:4px}.border-t{border-top-style:var(--tw-border-style);border-top-width:1px}.border-r{border-right-style:var(--tw-border-style);border-right-width:1px}.border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.border-b-2{border-bottom-style:var(--tw-border-style);border-bottom-width:2px}.border-l-0{border-left-style:var(--tw-border-style);border-left-width:0}.border-dashed{--tw-border-style:dashed;border-style:dashed}.border-blue-200{border-color:var(--color-blue-200)}.border-blue-500{border-color:var(--color-blue-500)}.border-blue-600{border-color:var(--color-blue-600)}.border-gray-100{border-color:var(--color-gray-100)}.border-gray-200{border-color:var(--c
olor-gray-200)}.border-gray-300{border-color:var(--color-gray-300)}.border-green-200{border-color:var(--color-green-200)}.border-red-200{border-color:var(--color-red-200)}.border-red-300{border-color:var(--color-red-300)}.border-transparent{border-color:#0000}.border-white{border-color:var(--color-white)}.border-yellow-200{border-color:var(--color-yellow-200)}.border-t-gray-900{border-top-color:var(--color-gray-900)}.border-r-gray-900{border-right-color:var(--color-gray-900)}.border-b-gray-900{border-bottom-color:var(--color-gray-900)}.border-l-gray-900{border-left-color:var(--color-gray-900)}.bg-black\/30{background-color:#0000004d}@supports (color:color-mix(in lab,red,red)){.bg-black\/30{background-color:color-mix(in oklab,var(--color-black)30%,transparent)}}.bg-blue-50{background-color:var(--color-blue-50)}.bg-blue-100{background-color:var(--color-blue-100)}.bg-blue-500{background-color:var(--color-blue-500)}.bg-blue-600{background-color:var(--color-blue-600)}.bg-gray-50{background-color:var(--color-gray-50)}.bg-gray-100{background-color:var(--color-gray-100)}.bg-gray-200{background-color:var(--color-gray-200)}.bg-gray-400{background-color:var(--color-gray-400)}.bg-gray-500{background-color:var(--color-gray-500)}.bg-gray-900{background-color:var(--color-gray-900)}.bg-green-50{background-color:var(--color-green-50)}.bg-green-100{background-color:var(--color-green-100)}.bg-green-500{background-color:var(--color-green-500)}.bg-orange-50{background-color:var(--color-orange-50)}.bg-purple-50{background-color:var(--color-purple-50)}.bg-purple-500{background-color:var(--color-purple-500)}.bg-red-50{background-color:var(--color-red-50)}.bg-red-100{background-color:var(--color-red-100)}.bg-red-500{background-color:var(--color-red-500)}.bg-red-600{background-color:var(--color-red-600)}.bg-red-900{background-color:var(--color-red-900)}.bg-white{background-color:var(--color-white)}.bg-yellow-50{background-color:var(--color-yellow-50)}.bg-yellow-100{background-color:var(--color-yellow-100)}.bg-yellow-500{background-color:var(--color-yellow-500)}.bg-gradient-to-r{--tw-gradient-position:to right in 
oklab;background-image:linear-gradient(var(--tw-gradient-stops))}.from-gray-50{--tw-gradient-from:var(--color-gray-50);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.to-white{--tw-gradient-to:var(--color-white);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.p-1{padding:calc(var(--spacing)*1)}.p-2{padding:calc(var(--spacing)*2)}.p-3{padding:calc(var(--spacing)*3)}.p-4{padding:calc(var(--spacing)*4)}.p-5{padding:calc(var(--spacing)*5)}.p-6{padding:calc(var(--spacing)*6)}.px-2{padding-inline:calc(var(--spacing)*2)}.px-2\.5{padding-inline:calc(var(--spacing)*2.5)}.px-3{padding-inline:calc(var(--spacing)*3)}.px-4{padding-inline:calc(var(--spacing)*4)}.px-6{padding-inline:calc(var(--spacing)*6)}.py-0\.5{padding-block:calc(var(--spacing)*.5)}.py-1{padding-block:calc(var(--spacing)*1)}.py-2{padding-block:calc(var(--spacing)*2)}.py-3{padding-block:calc(var(--spacing)*3)}.py-4{padding-block:calc(var(--spacing)*4)}.py-5{padding-block:calc(var(--spacing)*5)}.py-6{padding-block:calc(var(--spacing)*6)}.py-8{padding-block:calc(var(--spacing)*8)}.py-12{padding-block:calc(var(--spacing)*12)}.pt-2{padding-top:calc(var(--spacing)*2)}.pt-4{padding-top:calc(var(--spacing)*4)}.pt-5{padding-top:calc(var(--spacing)*5)}.pt-6{padding-top:calc(var(--spacing)*6)}.pt-20{padding-top:calc(var(--spacing)*20)}.pr-3{padding-right:calc(var(--spacing)*3)}.pr-8{padding-right:calc(var(--spacing)*8)}.pb-2{padding-bottom:calc(var(--spacing)*2)}.pb-4{padding-bottom:calc(var(--spacing)*4)}.pl-2{padding-left:calc(var(--spacing)*2)}.pl-3{padding-left:calc(var(--spacing)*3)}.pl-10{padding-left:calc(var(--spacing)*10)}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.font-mono{font-family:var(--font-mono)}.text-2xl{font-size:var(--text-2xl);line-height:var(--tw-leading,var(--text-2xl--line-height))}.text-3xl{font-size:var(--text-3xl);line-height:var(--tw-leading,var(--text-3xl--line-height))}.text-base{font-size:var(--text-base);line-height:var(--tw-leading,var(--text-base--line-height))}.text-lg{font-size:var(--text-lg);line-height:var(--tw-leading,var(--text-lg--line-height))}.text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}.text-xl{font-size:var(--text-xl);line-height:var(--tw-leading,var(--text-xl--line-height))}.text-xs{font-size:var(--text-xs);line-height:var(--tw-leading,var(--text-xs--line-height))}.leading-5{--tw-leading:calc(var(--spacing)*5);line-height:calc(var(--spacing)*5)}.leading-6{--tw-leading:calc(var(--spacing)*6);line-height:calc(var(--spacing)*6)}.font-bold{--tw-font-weight:var(--font-weight-bold);font-weight:var(--font-weight-bold)}.font-extrabold{--tw-font-weight:var(--font-weight-extrabold);font-weight:var(--font-weight-extrabold)}.font-medium{--tw-font-weight:var(--font-weight-medium);font-weight:var(--font-weight-medium)}.font-semibold{--tw-font-weight:var(--font-weight-semibold);font-weight:var(--font-weight-semibold)}.tracking-wide{--tw-tracking:var(--tracking-wide);letter-spacing:var(--tracking-wide)}.tracking-wider{--tw-tracking:var(--tracking-wider);letter-spacing:var(--tracking-wider)}.break-all{word-break:break-all}.text-black{color:var(--color-black)}.text-blue-400{color:var(--color-blue-400)}.text-blue-600{color:var(--color-blue-600)}.text-blue-700{
color:var(--color-blue-700)}.text-blue-800{color:var(--color-blue-800)}.text-gray-300{color:var(--color-gray-300)}.text-gray-400{color:var(--color-gray-400)}.text-gray-500{color:var(--color-gray-500)}.text-gray-600{color:var(--color-gray-600)}.text-gray-700{color:var(--color-gray-700)}.text-gray-800{color:var(--color-gray-800)}.text-gray-900{color:var(--color-gray-900)}.text-green-400{color:var(--color-green-400)}.text-green-500{color:var(--color-green-500)}.text-green-600{color:var(--color-green-600)}.text-green-700{color:var(--color-green-700)}.text-green-800{color:var(--color-green-800)}.text-indigo-600{color:var(--color-indigo-600)}.text-orange-700{color:var(--color-orange-700)}.text-purple-600{color:var(--color-purple-600)}.text-purple-700{color:var(--color-purple-700)}.text-red-400{color:var(--color-red-400)}.text-red-500{color:var(--color-red-500)}.text-red-600{color:var(--color-red-600)}.text-red-700{color:var(--color-red-700)}.text-red-800{color:var(--color-red-800)}.text-red-900{color:var(--color-red-900)}.text-white{color:var(--color-white)}.text-yellow-400{color:var(--color-yellow-400)}.text-yellow-600{color:var(--color-yellow-600)}.text-yellow-700{color:var(--color-yellow-700)}.text-yellow-800{color:var(--color-yellow-800)}.capitalize{text-transform:capitalize}.uppercase{text-transform:uppercase}.italic{font-style:italic}.placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.placeholder-gray-400::placeholder{color:var(--color-gray-400)}.placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.placeholder-gray-500::placeholder{color:var(--color-gray-500)}.opacity-0{opacity:0}.opacity-25{opacity:.25}.opacity-50{opacity:.5}.opacity-75{opacity:.75}.shadow{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px var(--tw-shadow-color,#0000001a),0 4px 6px -4px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-sm{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-xl{--tw-shadow:0 20px 25px -5px var(--tw-shadow-color,#0000001a),0 8px 10px -6px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring,.ring-1{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring-blue-200{--tw-ring-color:var(--color-blue-200)}.ring-blue-600\/20{--tw-ring-color:#155dfc33}@supports (color:color-mix(in lab,red,red)){.ring-blue-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-600)20%,transparent)}}.ring-gray-200{--tw-ring-color:var(--color-gray-200)}.ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.ring-gray-600\/20{--tw-ring-color:#4a556533}@supports (color:color-mix(in 
lab,red,red)){.ring-gray-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-600)20%,transparent)}}.ring-green-200{--tw-ring-color:var(--color-green-200)}.ring-green-600\/20{--tw-ring-color:#00a54433}@supports (color:color-mix(in lab,red,red)){.ring-green-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-600)20%,transparent)}}.ring-orange-600\/20{--tw-ring-color:#f0510033}@supports (color:color-mix(in lab,red,red)){.ring-orange-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-600)20%,transparent)}}.ring-purple-600\/20{--tw-ring-color:#9810fa33}@supports (color:color-mix(in lab,red,red)){.ring-purple-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-600)20%,transparent)}}.ring-red-200{--tw-ring-color:var(--color-red-200)}.ring-red-600\/20{--tw-ring-color:#e4001433}@supports (color:color-mix(in lab,red,red)){.ring-red-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-600)20%,transparent)}}.ring-yellow-200{--tw-ring-color:var(--color-yellow-200)}.ring-yellow-600\/20{--tw-ring-color:#cd890033}@supports (color:color-mix(in lab,red,red)){.ring-yellow-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-600)20%,transparent)}}.filter{filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.transition{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to,opacity,box-shadow,transform,translate,scale,rotate,filter,-webkit-backdrop-filter,backdrop-filter,display,visibility,content-visibility,overlay,pointer-events;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-all{transition-property:all;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-shadow{transition-property:box-shadow;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.duration-200{--tw-duration:.2s;transition-duration:.2s}.duration-300{--tw-duration:.3s;transition-duration:.3s}.ease-in-out{--tw-ease:var(--ease-in-out);transition-timing-function:var(--ease-in-out)}.ring-inset{--tw-ring-inset:inset}@media (hover:hover){.group-hover\:visible:is(:where(.group):hover *){visibility:visible}.group-hover\:opacity-100:is(:where(.group):hover *){opacity:1}}.first\:rounded-l-md:first-child{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.first\:border-l:first-child{border-left-style:var(--tw-border-style);border-left-width:1px}@media 
(hover:hover){.hover\:scale-105:hover{--tw-scale-x:105%;--tw-scale-y:105%;--tw-scale-z:105%;scale:var(--tw-scale-x)var(--tw-scale-y)}.hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.hover\:border-gray-400:hover{border-color:var(--color-gray-400)}.hover\:bg-blue-200:hover{background-color:var(--color-blue-200)}.hover\:bg-blue-700:hover{background-color:var(--color-blue-700)}.hover\:bg-gray-50:hover{background-color:var(--color-gray-50)}.hover\:bg-gray-200:hover{background-color:var(--color-gray-200)}.hover\:bg-red-200:hover{background-color:var(--color-red-200)}.hover\:bg-red-700:hover{background-color:var(--color-red-700)}.hover\:text-blue-500:hover{color:var(--color-blue-500)}.hover\:text-blue-600:hover{color:var(--color-blue-600)}.hover\:text-gray-600:hover{color:var(--color-gray-600)}.hover\:text-gray-700:hover{color:var(--color-gray-700)}.hover\:text-gray-800:hover{color:var(--color-gray-800)}.hover\:text-gray-900:hover{color:var(--color-gray-900)}.hover\:text-green-500:hover{color:var(--color-green-500)}.hover\:text-green-900:hover{color:var(--color-green-900)}.hover\:text-indigo-900:hover{color:var(--color-indigo-900)}.hover\:text-red-500:hover{color:var(--color-red-500)}.hover\:text-red-900:hover{color:var(--color-red-900)}.hover\:text-yellow-300:hover{color:var(--color-yellow-300)}.hover\:text-yellow-500:hover{color:var(--color-yellow-500)}.hover\:underline:hover{text-decoration-line:underline}.hover\:shadow-md:hover{--tw-shadow:0 4px 6px -1px var(--tw-shadow-color,#0000001a),0 2px 4px -2px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.hover\:shadow-sm:hover{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}}.focus\:z-10:focus{z-index:10}.focus\:border-blue-500:focus{border-color:var(--color-blue-500)}.focus\:bg-red-200:focus{background-color:var(--color-red-200)}.focus\:placeholder-gray-400:focus::-moz-placeholder{color:var(--color-gray-400)}.focus\:placeholder-gray-400:focus::placeholder{color:var(--color-gray-400)}.focus\:ring-1:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-2:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-blue-500:focus{--tw-ring-color:var(--color-blue-500)}.focus\:ring-gray-500:focus{--tw-ring-color:var(--color-gray-500)}.focus\:ring-green-500:focus{--tw-ring-color:var(--color-green-500)}.focus\:ring-indigo-500:focus{--tw-ring-color:var(--color-indigo-500)}.focus\:ring-red-500:focus{--tw-ring-color:var(--color-red-500)}.focus\:ring-white:focus{--tw-ring-color:var(--color-white)}.focus\:ring-yellow-500:focus{--tw-ring-color:var(--color-yellow-500)}.focus\:ring-offset-2:focus{--tw-ring-offset-width:2px;--tw-ring-offset-shadow:var(--tw-ring-inset,)0 0 0 
var(--tw-ring-offset-width)var(--tw-ring-offset-color)}.focus\:outline-none:focus{--tw-outline-style:none;outline-style:none}.focus\:ring-inset:focus{--tw-ring-inset:inset}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:bg-gray-400:disabled{background-color:var(--color-gray-400)}.disabled\:opacity-50:disabled{opacity:.5}@media (hover:hover){.disabled\:hover\:bg-gray-400:disabled:hover{background-color:var(--color-gray-400)}}@media (min-width:640px){.sm\:mx-auto{margin-inline:auto}.sm\:mt-0{margin-top:calc(var(--spacing)*0)}.sm\:ml-4{margin-left:calc(var(--spacing)*4)}.sm\:block{display:block}.sm\:flex{display:flex}.sm\:hidden{display:none}.sm\:w-full{width:100%}.sm\:max-w-md{max-width:var(--container-md)}.sm\:flex-1{flex:1}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:items-center{align-items:center}.sm\:justify-between{justify-content:space-between}.sm\:rounded-lg{border-radius:var(--radius-lg)}.sm\:p-6{padding:calc(var(--spacing)*6)}.sm\:px-6{padding-inline:calc(var(--spacing)*6)}.sm\:px-10{padding-inline:calc(var(--spacing)*10)}.sm\:text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}}@media (min-width:768px){.md\:ml-2{margin-left:calc(var(--spacing)*2)}.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}:where(.md\:space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}}@media (min-width:1024px){.lg\:fixed{position:fixed}.lg\:inset-y-0{inset-block:calc(var(--spacing)*0)}.lg\:flex{display:flex}.lg\:hidden{display:none}.lg\:w-64{width:calc(var(--spacing)*64)}.lg\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.lg\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.lg\:flex-col{flex-direction:column}.lg\:px-8{padding-inline:calc(var(--spacing)*8)}.lg\:pt-6{padding-top:calc(var(--spacing)*6)}.lg\:pl-64{padding-left:calc(var(--spacing)*64)}}.dark .dark\:block{display:block}.dark .dark\:hidden{display:none}:where(.dark .dark\:divide-gray-700>:not(:last-child)){border-color:var(--color-gray-700)}.dark .dark\:border-blue-700{border-color:var(--color-blue-700)}.dark .dark\:border-blue-800{border-color:var(--color-blue-800)}.dark .dark\:border-gray-600{border-color:var(--color-gray-600)}.dark .dark\:border-gray-700{border-color:var(--color-gray-700)}.dark .dark\:border-green-700{border-color:var(--color-green-700)}.dark .dark\:border-red-600{border-color:var(--color-red-600)}.dark .dark\:border-red-700{border-color:var(--color-red-700)}.dark .dark\:border-red-800{border-color:var(--color-red-800)}.dark .dark\:border-yellow-700{border-color:var(--color-yellow-700)}.dark .dark\:border-yellow-800{border-color:var(--color-yellow-800)}.dark .dark\:bg-black\/50{background-color:#00000080}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-black\/50{background-color:color-mix(in oklab,var(--color-black)50%,transparent)}}.dark .dark\:bg-blue-500\/10{background-color:#3080ff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-500\/10{background-color:color-mix(in oklab,var(--color-blue-500)10%,transparent)}}.dark .dark\:bg-blue-900{background-color:var(--color-blue-900)}.dark .dark\:bg-blue-900\/20{background-color:#1c398e33}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:bg-blue-900\/20{background-color:color-mix(in oklab,var(--color-blue-900)20%,transparent)}}.dark .dark\:bg-blue-900\/50{background-color:#1c398e80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-900\/50{background-color:color-mix(in oklab,var(--color-blue-900)50%,transparent)}}.dark .dark\:bg-gray-500\/10{background-color:#6a72821a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-500\/10{background-color:color-mix(in oklab,var(--color-gray-500)10%,transparent)}}.dark .dark\:bg-gray-600{background-color:var(--color-gray-600)}.dark .dark\:bg-gray-700{background-color:var(--color-gray-700)}.dark .dark\:bg-gray-800{background-color:var(--color-gray-800)}.dark .dark\:bg-gray-800\/50{background-color:#1e293980}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-800\/50{background-color:color-mix(in oklab,var(--color-gray-800)50%,transparent)}}.dark .dark\:bg-gray-900{background-color:var(--color-gray-900)}.dark .dark\:bg-gray-900\/50{background-color:#10182880}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-900\/50{background-color:color-mix(in oklab,var(--color-gray-900)50%,transparent)}}.dark .dark\:bg-green-500\/10{background-color:#00c7581a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-500\/10{background-color:color-mix(in oklab,var(--color-green-500)10%,transparent)}}.dark .dark\:bg-green-900{background-color:var(--color-green-900)}.dark .dark\:bg-green-900\/50{background-color:#0d542b80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-900\/50{background-color:color-mix(in oklab,var(--color-green-900)50%,transparent)}}.dark .dark\:bg-orange-500\/10{background-color:#fe6e001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-orange-500\/10{background-color:color-mix(in oklab,var(--color-orange-500)10%,transparent)}}.dark .dark\:bg-purple-500\/10{background-color:#ac4bff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-purple-500\/10{background-color:color-mix(in oklab,var(--color-purple-500)10%,transparent)}}.dark .dark\:bg-red-500\/10{background-color:#fb2c361a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-500\/10{background-color:color-mix(in oklab,var(--color-red-500)10%,transparent)}}.dark .dark\:bg-red-700{background-color:var(--color-red-700)}.dark .dark\:bg-red-800{background-color:var(--color-red-800)}.dark .dark\:bg-red-900{background-color:var(--color-red-900)}.dark .dark\:bg-red-900\/20{background-color:#82181a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/20{background-color:color-mix(in oklab,var(--color-red-900)20%,transparent)}}.dark .dark\:bg-red-900\/50{background-color:#82181a80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/50{background-color:color-mix(in oklab,var(--color-red-900)50%,transparent)}}.dark .dark\:bg-yellow-500\/10{background-color:#edb2001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-500\/10{background-color:color-mix(in oklab,var(--color-yellow-500)10%,transparent)}}.dark .dark\:bg-yellow-900{background-color:var(--color-yellow-900)}.dark .dark\:bg-yellow-900\/20{background-color:#733e0a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-900\/20{background-color:color-mix(in oklab,var(--color-yellow-900)20%,transparent)}}.dark 
.dark\:from-gray-800{--tw-gradient-from:var(--color-gray-800);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:to-gray-700{--tw-gradient-to:var(--color-gray-700);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:text-blue-200{color:var(--color-blue-200)}.dark .dark\:text-blue-300{color:var(--color-blue-300)}.dark .dark\:text-blue-400{color:var(--color-blue-400)}.dark .dark\:text-gray-200{color:var(--color-gray-200)}.dark .dark\:text-gray-300{color:var(--color-gray-300)}.dark .dark\:text-gray-400{color:var(--color-gray-400)}.dark .dark\:text-gray-500{color:var(--color-gray-500)}.dark .dark\:text-green-200{color:var(--color-green-200)}.dark .dark\:text-green-300{color:var(--color-green-300)}.dark .dark\:text-green-400{color:var(--color-green-400)}.dark .dark\:text-indigo-400{color:var(--color-indigo-400)}.dark .dark\:text-orange-400{color:var(--color-orange-400)}.dark .dark\:text-purple-400{color:var(--color-purple-400)}.dark .dark\:text-red-100{color:var(--color-red-100)}.dark .dark\:text-red-200{color:var(--color-red-200)}.dark .dark\:text-red-300{color:var(--color-red-300)}.dark .dark\:text-red-400{color:var(--color-red-400)}.dark .dark\:text-white{color:var(--color-white)}.dark .dark\:text-yellow-200{color:var(--color-yellow-200)}.dark .dark\:text-yellow-300{color:var(--color-yellow-300)}.dark .dark\:text-yellow-400{color:var(--color-yellow-400)}.dark .dark\:placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-400::placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.dark .dark\:placeholder-gray-500::placeholder{color:var(--color-gray-500)}.dark .dark\:ring-blue-400\/20{--tw-ring-color:#54a2ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)20%,transparent)}}.dark .dark\:ring-blue-400\/30{--tw-ring-color:#54a2ff4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)30%,transparent)}}.dark .dark\:ring-blue-500\/20{--tw-ring-color:#3080ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-500)20%,transparent)}}.dark .dark\:ring-gray-400\/20{--tw-ring-color:#99a1af33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)20%,transparent)}}.dark .dark\:ring-gray-400\/30{--tw-ring-color:#99a1af4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)30%,transparent)}}.dark .dark\:ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.dark .dark\:ring-green-400\/20{--tw-ring-color:#05df7233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-400)20%,transparent)}}.dark .dark\:ring-green-400\/30{--tw-ring-color:#05df724d}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:ring-green-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-green-400)30%,transparent)}}.dark .dark\:ring-green-500\/20{--tw-ring-color:#00c75833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-500)20%,transparent)}}.dark .dark\:ring-orange-500\/20{--tw-ring-color:#fe6e0033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-orange-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-500)20%,transparent)}}.dark .dark\:ring-purple-500\/20{--tw-ring-color:#ac4bff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-purple-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-500)20%,transparent)}}.dark .dark\:ring-red-400\/20{--tw-ring-color:#ff656833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-400)20%,transparent)}}.dark .dark\:ring-red-400\/30{--tw-ring-color:#ff65684d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-red-400)30%,transparent)}}.dark .dark\:ring-red-500\/20{--tw-ring-color:#fb2c3633}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-500)20%,transparent)}}.dark .dark\:ring-yellow-400\/30{--tw-ring-color:#fac8004d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-yellow-400)30%,transparent)}}.dark .dark\:ring-yellow-500\/20{--tw-ring-color:#edb20033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-500)20%,transparent)}}@media (hover:hover){.dark .dark\:hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.dark .dark\:hover\:border-gray-500:hover{border-color:var(--color-gray-500)}.dark .dark\:hover\:bg-blue-800:hover{background-color:var(--color-blue-800)}.dark .dark\:hover\:bg-gray-500:hover{background-color:var(--color-gray-500)}.dark .dark\:hover\:bg-gray-600:hover{background-color:var(--color-gray-600)}.dark .dark\:hover\:bg-gray-700:hover{background-color:var(--color-gray-700)}.dark .dark\:hover\:bg-gray-800:hover{background-color:var(--color-gray-800)}.dark .dark\:hover\:bg-red-700:hover{background-color:var(--color-red-700)}.dark .dark\:hover\:bg-red-800:hover{background-color:var(--color-red-800)}.dark .dark\:hover\:text-blue-300:hover{color:var(--color-blue-300)}.dark .dark\:hover\:text-gray-100:hover{color:var(--color-gray-100)}.dark .dark\:hover\:text-gray-300:hover{color:var(--color-gray-300)}.dark .dark\:hover\:text-green-300:hover{color:var(--color-green-300)}.dark .dark\:hover\:text-indigo-300:hover{color:var(--color-indigo-300)}.dark .dark\:hover\:text-red-300:hover{color:var(--color-red-300)}.dark .dark\:hover\:text-white:hover{color:var(--color-white)}}.dark .dark\:focus\:bg-red-700:focus{background-color:var(--color-red-700)}.dark .dark\:focus\:ring-offset-gray-900:focus{--tw-ring-offset-color:var(--color-gray-900)}}@property --tw-translate-x{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-y{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-z{syntax:"*";inherits:false;initial-value:0}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property 
--tw-skew-y{syntax:"*";inherits:false}@property --tw-space-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-space-x-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-gradient-position{syntax:"*";inherits:false}@property --tw-gradient-from{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-via{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-to{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-stops{syntax:"*";inherits:false}@property --tw-gradient-via-stops{syntax:"*";inherits:false}@property --tw-gradient-from-position{syntax:"";inherits:false;initial-value:0%}@property --tw-gradient-via-position{syntax:"";inherits:false;initial-value:50%}@property --tw-gradient-to-position{syntax:"";inherits:false;initial-value:100%}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-tracking{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-duration{syntax:"*";inherits:false}@property --tw-ease{syntax:"*";inherits:false}@property --tw-scale-x{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-y{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-z{syntax:"*";inherits:false;initial-value:1}@keyframes spin{to{transform:rotate(360deg)}}@keyframes pulse{50%{opacity:.5}} diff --git a/webapp/assets/_app/immutable/chunks/0ZGtv6cq.js b/webapp/assets/_app/immutable/chunks/0ZGtv6cq.js deleted file mode 100644 index 99e96ab7..00000000 --- a/webapp/assets/_app/immutable/chunks/0ZGtv6cq.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as W}from"./zNh6Oe5P.js";import{f as S,j as t,k as p,r as a,t as L,v as _,c as u,z as N,D as A,p as X,u as T,n as P,d as 
Y}from"./sWNKMed7.js";import{p as s,i as I}from"./Ccl3fNd2.js";import{s as Z,h as $,B as F,d as B,c as ee}from"./DVl4ZBgx.js";import{D as te,G as ae,a as se}from"./DCYYzf48.js";import{E as le}from"./DAWfW-VQ.js";import{S as G}from"./BJXodF8n.js";var ne=S('
                '),ie=S('
                '),re=S('

                ');function ge(j,e){let n=s(e,"title",8),E=s(e,"subtitle",8),b=s(e,"forgeIcon",8,""),f=s(e,"onEdit",8,null),h=s(e,"onDelete",8,null),k=s(e,"editLabel",8,"Edit"),z=s(e,"deleteLabel",8,"Delete"),g=s(e,"titleClass",8,"");var c=re(),v=t(c),m=t(v),y=t(m),C=t(y);{var H=i=>{var r=ne(),w=t(r);$(w,b),a(r),u(i,r)};I(C,i=>{b()&&i(H)})}var l=p(C,2),D=t(l),V=t(D,!0);a(D);var M=p(D,2),R=t(M,!0);a(M),a(l),a(y);var q=p(y,2);{var J=i=>{var r=ie(),w=t(r);{var K=o=>{F(o,{variant:"secondary",size:"md",icon:"",$$events:{click(...d){f()?.apply(this,d)}},children:(d,U)=>{N();var x=A();L(()=>_(x,k())),u(d,x)},$$slots:{default:!0}})};I(w,o=>{f()&&o(K)})}var O=p(w,2);{var Q=o=>{F(o,{variant:"danger",size:"md",icon:"",$$events:{click(...d){h()?.apply(this,d)}},children:(d,U)=>{N();var x=A();L(()=>_(x,z())),u(d,x)},$$slots:{default:!0}})};I(O,o=>{h()&&o(Q)})}a(r),u(i,r)};I(q,i=>{(f()||h())&&i(J)})}a(m),a(v),a(c),L(()=>{Z(D,1,`text-2xl font-bold text-gray-900 dark:text-white ${g()??""}`),_(V,n()),_(R,E())}),u(j,c)}var oe=S('');function ye(j,e){X(e,!1);let n=s(e,"instances",8),E=s(e,"entityType",8),b=s(e,"onDeleteInstance",8);const f=[{key:"name",title:"Name",cellComponent:le,cellProps:{entityType:"instance",nameField:"name"}},{key:"status",title:"Status",cellComponent:G,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:G,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"created",title:"Created",cellComponent:ae,cellProps:{field:"created_at",type:"date"}},{key:"actions",title:"Actions",align:"right",cellComponent:se,cellProps:{actions:[{type:"delete",label:"Delete",title:"Delete instance",ariaLabel:"Delete instance",action:"delete"}]}}],h={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"}],actions:[{type:"delete",handler:l=>k(l)}]};function k(l){b()(l)}function z(l){k(l.detail.item)}W();var g=oe(),c=t(g),v=t(c),m=t(v),y=t(m);a(m);var C=p(m,2);a(v);var H=p(v,2);te(H,{get columns(){return f},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return P(n()),T(()=>n().length)},totalPages:1,get totalItems(){return P(n()),T(()=>n().length)},itemName:"instances",emptyTitle:"No instances running",get emptyMessage(){return`No instances running for this ${E()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return h},$$events:{delete:z}}),a(c),a(g),L(l=>{_(y,`Instances (${P(n()),T(()=>n().length)??""})`),ee(C,"href",l)},[()=>(P(B),T(()=>B("/instances")))]),u(j,g),Y()}export{ge as D,ye as I}; diff --git a/webapp/assets/_app/immutable/chunks/BJXodF8n.js b/webapp/assets/_app/immutable/chunks/BJXodF8n.js deleted file mode 100644 index a90b93bc..00000000 --- a/webapp/assets/_app/immutable/chunks/BJXodF8n.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as v}from"./zNh6Oe5P.js";import{p as w,l as m,n as s,g as r,m as g,a as x,B as h,b as T,c as B,d as S,s as k,u}from"./sWNKMed7.js";import{k as A}from"./DCYYzf48.js";import{p as d}from"./Ccl3fNd2.js";import{k as b,B as C}from"./cjRLNre3.js";import{f as E}from"./ow_oMtSd.js";function q(_,i){w(i,!1);const c=g(),n=g();let e=d(i,"item",8),l=d(i,"statusType",8,"entity"),a=d(i,"statusField",8,"status");m(()=>(s(e()),s(a())),()=>{k(c,e()?.[a()]||"unknown")}),m(()=>(s(e()),s(l()),r(c),s(a())),()=>{k(n,(()=>{if(!e())return{variant:"error",text:"Unknown"};switch(l()){case"entity":return 
diff --git a/webapp/assets/_app/immutable/chunks/BOoHOAHB.js b/webapp/assets/_app/immutable/chunks/BOoHOAHB.js
deleted file mode 100644
index 0b018ac1..00000000
--- a/webapp/assets/_app/immutable/chunks/BOoHOAHB.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — "Update Pool" modal form: image/OS configuration, runner limits and timing, tags, extra specs (export{Qr as U})]
diff --git a/webapp/assets/_app/immutable/chunks/BRFhz4VJ.js b/webapp/assets/_app/immutable/chunks/BRFhz4VJ.js
deleted file mode 100644
index 27f45251..00000000
--- a/webapp/assets/_app/immutable/chunks/BRFhz4VJ.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — "Create New Pool" modal form for repositories, organizations and enterprises (export{ht as C})]
diff --git a/webapp/assets/_app/immutable/chunks/BZUCTtPY.js b/webapp/assets/_app/immutable/chunks/BZUCTtPY.js
deleted file mode 100644
index ff78735d..00000000
--- a/webapp/assets/_app/immutable/chunks/BZUCTtPY.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — toast-notification store with auto-dismiss (export{p as t})]
diff --git a/webapp/assets/_app/immutable/chunks/BZiHL9L3.js b/webapp/assets/_app/immutable/chunks/BZiHL9L3.js
deleted file mode 100644
index 586e429f..00000000
--- a/webapp/assets/_app/immutable/chunks/BZiHL9L3.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — maps API error payloads and HTTP status codes to user-facing messages (export{n as e})]
diff --git a/webapp/assets/_app/immutable/chunks/BaVaT5nU.js b/webapp/assets/_app/immutable/chunks/BaVaT5nU.js
deleted file mode 100644
index 34a6f851..00000000
--- a/webapp/assets/_app/immutable/chunks/BaVaT5nU.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — linked-name table cell component (export{F as P})]
diff --git a/webapp/assets/_app/immutable/chunks/BuuPrWMc.js b/webapp/assets/_app/immutable/chunks/BuuPrWMc.js
deleted file mode 100644
index 72f1fd13..00000000
--- a/webapp/assets/_app/immutable/chunks/BuuPrWMc.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — keyed each-block runtime plus WebSocket event store with reconnect/backoff and entity subscriptions (export{xe as e,Te as i,We as w})]
diff --git a/webapp/assets/_app/immutable/chunks/C6O4o7G1.js b/webapp/assets/_app/immutable/chunks/C6O4o7G1.js
deleted file mode 100644
index 4c1792d1..00000000
--- a/webapp/assets/_app/immutable/chunks/C6O4o7G1.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — route pattern/parameter matching utilities and the "/ui" base path (export{C as a,v as b,k as e,x as p,j as r})]
diff --git a/webapp/assets/_app/immutable/chunks/CARsAFuo.js b/webapp/assets/_app/immutable/chunks/CARsAFuo.js
deleted file mode 100644
index 4a24ed6b..00000000
--- a/webapp/assets/_app/immutable/chunks/CARsAFuo.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — endpoint icon/name table cell component (export{U as E})]
diff --git a/webapp/assets/_app/immutable/chunks/CCYOsezl.js b/webapp/assets/_app/immutable/chunks/CCYOsezl.js
deleted file mode 100644
index 5f546d50..00000000
--- a/webapp/assets/_app/immutable/chunks/CCYOsezl.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — conditional component-render helper (export{y as c})]
diff --git a/webapp/assets/_app/immutable/chunks/CJwphPxi.js b/webapp/assets/_app/immutable/chunks/CJwphPxi.js
deleted file mode 100644
index 4677cd24..00000000
--- a/webapp/assets/_app/immutable/chunks/CJwphPxi.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — page/navigating store wrapper (export{b as p})]
diff --git a/webapp/assets/_app/immutable/chunks/CLagxtgo.js b/webapp/assets/_app/immutable/chunks/CLagxtgo.js
deleted file mode 100644
index af862710..00000000
--- a/webapp/assets/_app/immutable/chunks/CLagxtgo.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — two-way input and checkbox binding helpers (export{E as a,y as b})]
diff --git a/webapp/assets/_app/immutable/chunks/CPCsbdkz.js b/webapp/assets/_app/immutable/chunks/CPCsbdkz.js
deleted file mode 100644
index eec0a233..00000000
--- a/webapp/assets/_app/immutable/chunks/CPCsbdkz.js
+++ /dev/null
@@ -1,3 +0,0 @@
-[3 minified lines elided — SvelteKit client runtime: router, navigation, preloading, scroll restoration and hydration (export{kn as a,An as g,yn as l,E as p,C as s})]
diff --git a/webapp/assets/_app/immutable/chunks/CVQRp8zk.js b/webapp/assets/_app/immutable/chunks/CVQRp8zk.js
deleted file mode 100644
index ee40b57a..00000000
--- a/webapp/assets/_app/immutable/chunks/CVQRp8zk.js
+++ /dev/null
@@ -1 +0,0 @@
-[1 minified line elided — auth store: cookie-backed token handling, login/logout, first-run initialization (export{n as a,c as b})]
c.checkAuth())){n.set({isAuthenticated:!0,user:a,loading:!1,needsInitialization:!1});return}n.update(e=>({...e,loading:!1,needsInitialization:!1}))}catch{n.update(a=>({...a,loading:!1}))}},async checkInitializationStatus(){try{const t={Accept:"application/json"},a=d("garm_token"),e=I();e&&a&&(t.Authorization=`Bearer ${a}`);const i=await fetch("/api/v1/login",{method:"GET",headers:t,credentials:e?"omit":"include"});if(!i.ok){if(i.status===409&&(await i.json()).error==="init_required")throw n.update(s=>({...s,needsInitialization:!0,loading:!1})),new Error("Initialization required");return}return}catch(t){if(t instanceof Error&&t.message==="Initialization required")throw t;return}},async checkAuth(){try{return await c.checkInitializationStatus(),await r.getControllerInfo(),!0}catch(t){return t instanceof Error&&t.message==="Initialization required"?!1:t?.response?.status===409&&t?.response?.data?.error==="init_required"?(n.update(a=>({...a,needsInitialization:!0,loading:!1})),!1):(c.logout(),!1)}},async initialize(t,a,e,i,o){try{n.update(u=>({...u,loading:!0}));const s=await r.firstRun({username:t,email:a,password:e,full_name:i||t});await c.login(t,e);const l=window.location.origin,h=o?.metadataUrl||`${l}/api/v1/metadata`,p=o?.callbackUrl||`${l}/api/v1/callbacks`,k=o?.webhookUrl||`${l}/webhooks`;await r.updateController({metadata_url:h,callback_url:p,webhook_url:k}),n.update(u=>({...u,needsInitialization:!1}))}catch(s){throw n.update(l=>({...l,loading:!1})),s}}};export{n as a,c as b}; diff --git a/webapp/assets/_app/immutable/chunks/CaVdfWt-.js b/webapp/assets/_app/immutable/chunks/CaVdfWt-.js deleted file mode 100644 index 630ef2bd..00000000 --- a/webapp/assets/_app/immutable/chunks/CaVdfWt-.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as K}from"./zNh6Oe5P.js";import{p as O,f as U,j as e,r as t,k as s,n as c,u as o,z as at,t as q,v as g,c as h,d as Q,E as mt,D as ut,B as Y,b as Z,g as P}from"./sWNKMed7.js";import{p as T,i as H,s as gt,a as pt}from"./Ccl3fNd2.js";import{c as rt,d as tt,B as xt}from"./DVl4ZBgx.js";import{b as R,B as $,e as ft}from"./cjRLNre3.js";import{e as yt}from"./DA-798Ko.js";import{D as ht,G as et}from"./DCYYzf48.js";import{E as _t}from"./DAWfW-VQ.js";import{S as kt}from"./BJXodF8n.js";import{e as bt,i as wt}from"./BuuPrWMc.js";import{b as Ct}from"./_gFYyMUN.js";var Pt=U('

                ID
                Created At
                Updated At
                Status
                Pool Balancer Type
                ');function $t(L,v){O(v,!1);let a=T(v,"entity",8),p=T(v,"entityType",8);function N(){return`${p().charAt(0).toUpperCase()+p().slice(1)} Information`}function n(){if(!a().endpoint?.base_url)return"#";switch(p()){case"repository":const d=a();return`${a().endpoint.base_url}/${d.owner}/${a().name}`;case"organization":return`${a().endpoint.base_url}/${a().name}`;case"enterprise":return`${a().endpoint.base_url}/enterprises/${a().name}`;default:return"#"}}function E(){return`${p().charAt(0).toUpperCase()+p().slice(1)} URL`}function V(){const d=a().pool_balancing_type;if(!d||d===""||d==="none")return"Round Robin (default)";switch(d){case"roundrobin":return"Round Robin";case"pack":return"Pack";default:return d}}K();var m=Pt(),x=e(m),_=e(x),I=e(_,!0);t(_);var k=s(_,2),i=e(k),f=s(e(i),2),b=e(f,!0);t(f),t(i);var u=s(i,2),D=s(e(u),2),M=e(D,!0);t(D),t(u);var w=s(u,2),S=s(e(w),2),G=e(S,!0);t(S),t(w);var r=s(w,2),C=s(e(r),2),l=e(C);{var j=d=>{$(d,{variant:"success",text:"Running"})},z=d=>{$(d,{variant:"error",text:"Stopped"})};H(l,d=>{c(a()),o(()=>a().pool_manager_status?.running)?d(j):d(z,!1)})}t(C),t(r);var B=s(r,2),A=s(e(B),2),y=e(A,!0);t(A),t(B);var W=s(B,2),F=e(W),st=e(F,!0);t(F);var X=s(F,2),J=e(X),ot=e(J);at(),t(J),t(X),t(W),t(k),t(x),t(m),q((d,it,dt,nt,lt,vt,ct)=>{g(I,d),g(b,(c(a()),o(()=>a().id))),g(M,it),g(G,dt),g(y,nt),g(st,lt),rt(J,"href",vt),g(ot,`${ct??""} `)},[()=>o(N),()=>(c(R),c(a()),o(()=>R(a().created_at))),()=>(c(R),c(a()),o(()=>R(a().updated_at))),()=>o(V),()=>o(E),()=>o(n),()=>o(n)]),h(L,m),Q()}var Tt=U('
                No pools configured
                '),Et=U('');function Wt(L,v){O(v,!1);const[a,p]=gt(),N=()=>pt(yt,"$eagerCache",a);let n=T(v,"pools",8),E=T(v,"entityType",8),V=T(v,"entityId",8,""),m=T(v,"entityName",8,"");const x=mt();function _(){x("addPool",{entityType:E(),entityId:V(),entityName:m()})}const I=[{key:"id",title:"ID",flexible:!0,cellComponent:_t,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:et,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:et,cellProps:{field:"provider_name"}},{key:"status",title:"Status",cellComponent:kt,cellProps:{statusType:"enabled"}}],k={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:r=>ft(r,N())},badges:[{type:"custom",value:r=>({variant:r.enabled?"success":"error",text:r.enabled?"Enabled":"Disabled"})}]};K();var i=Et(),f=e(i),b=e(f),u=e(b),D=e(u);t(u);var M=s(u,2);t(b);var w=s(b,2);{var S=r=>{var C=Tt(),l=s(e(C),4),j=e(l);t(l);var z=s(l,2),B=e(z);xt(B,{variant:"primary",size:"sm",$$events:{click:_},children:(A,y)=>{at();var W=ut("Add Pool");h(A,W)},$$slots:{default:!0}}),t(z),t(C),q(()=>g(j,`No pools configured for this ${E()??""}.`)),h(r,C)},G=r=>{ht(r,{get columns(){return I},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return c(n()),o(()=>n().length)},totalPages:1,get totalItems(){return c(n()),o(()=>n().length)},itemName:"pools",emptyTitle:"No pools configured",get emptyMessage(){return`No pools configured for this ${E()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return k}})};H(w,r=>{c(n()),o(()=>n().length===0)?r(S):r(G,!1)})}t(f),t(i),q(r=>{g(D,`Pools (${c(n()),o(()=>n().length)??""})`),rt(M,"href",r)},[()=>(c(tt),o(()=>tt("/pools")))]),h(L,i),Q(),p()}var It=U('

                '),Bt=U('
                Events
                '),Nt=U('
                Events
                No events available
                ');function qt(L,v){O(v,!1);let a=T(v,"events",8),p=T(v,"eventsContainer",12,void 0);K();var N=Y(),n=Z(N);{var E=m=>{var x=Bt(),_=e(x),I=s(e(_),2);bt(I,5,a,wt,(k,i)=>{var f=It(),b=e(f),u=e(b),D=e(u,!0);t(u);var M=s(u,2),w=e(M);{var S=l=>{$(l,{variant:"error",text:"Error"})},G=l=>{var j=Y(),z=Z(j);{var B=y=>{$(y,{variant:"warning",text:"Warning"})},A=y=>{$(y,{variant:"info",text:"Info"})};H(z,y=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="warning")?y(B):y(A,!1)},!0)}h(l,j)};H(w,l=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="error")?l(S):l(G,!1)})}var r=s(w,2),C=e(r,!0);t(r),t(M),t(b),t(f),q(l=>{g(D,(P(i),o(()=>P(i).message))),g(C,l)},[()=>(c(R),P(i),o(()=>R(P(i).created_at)))]),h(k,f)}),t(I),Ct(I,k=>p(k),()=>p()),t(_),t(x),h(m,x)},V=m=>{var x=Nt();h(m,x)};H(n,m=>{c(a()),o(()=>a()&&a().length>0)?m(E):m(V,!1)})}h(L,N),Q()}export{$t as E,Wt as P,qt as a}; diff --git a/webapp/assets/_app/immutable/chunks/Ccl3fNd2.js b/webapp/assets/_app/immutable/chunks/Ccl3fNd2.js deleted file mode 100644 index ae407549..00000000 --- a/webapp/assets/_app/immutable/chunks/Ccl3fNd2.js +++ /dev/null @@ -1 +0,0 @@ -import{M as K,K as T,L as j,N as C,_ as F,a0 as q,a1 as $,Y as z,a2 as x,O as G,P as A,Q as H,at as J,R as Z,aa as Q,U as V,T as W,au as D,m as X,av as k,s as U,J as ee,g as m,aw as re,ax as ne,ay as w,az as se,aA as M,ar as ae,q as ie,aB as te,aj as R,aC as ue,a6 as fe,aD as le,u as oe,aE as ce,aF as de,aG as _e,aH as N,aI as L,aJ as pe,aK as ve,S as Y,aL as B,aM as S}from"./sWNKMed7.js";function Ie(e,r,s=!1){T&&j();var n=e,a=null,i=null,l=J,d=s?C:0,p=!1;const P=(o,u=!0)=>{p=!0,_(u,o)};var f=null;function I(){f!==null&&(f.lastChild.remove(),n.before(f),f=null);var o=l?a:i,u=l?i:a;o&&Q(o),u&&V(u,()=>{l?i=null:a=null})}const _=(o,u)=>{if(l===(l=o))return;let g=!1;if(T){const E=F(n)===q;!!l===E&&(n=$(),z(n),x(!1),g=!0)}var b=Z(),c=n;if(b&&(f=document.createDocumentFragment(),f.append(c=G())),l?a??=u&&A(()=>u(c)):i??=u&&A(()=>u(c)),b){var h=H,t=l?a:i,v=l?i:a;t&&h.skipped_effects.delete(t),v&&h.skipped_effects.add(v),h.add_callback(I)}else I();g&&x(!0)};K(()=>{p=!1,r(P),p||_(null,null)},d),T&&(n=W)}let O=!1,y=Symbol();function ge(e,r,s){const n=s[r]??={store:null,source:X(void 0),unsubscribe:D};if(n.store!==e&&!(y in s))if(n.unsubscribe(),n.store=e??null,e==null)n.source.v=void 0,n.unsubscribe=D;else{var a=!0;n.unsubscribe=k(e,i=>{a?n.source.v=i:U(n.source,i)}),a=!1}return e&&y in s?ee(e):m(n.source)}function Ee(){const e={};function r(){re(()=>{for(var s in e)e[s].unsubscribe();ne(e,y,{enumerable:!1,value:!0})})}return[e,r]}function be(e){var r=O;try{return O=!1,[e(),O]}finally{O=r}}const he={get(e,r){if(!e.exclude.includes(r))return m(e.version),r in e.special?e.special[r]():e.props[r]},set(e,r,s){if(!(r in e.special)){var n=R;try{L(e.parent_effect),e.special[r]=me({get[r](){return e.props[r]}},r,M)}finally{L(n)}}return e.special[r](s),N(e.version),!0},getOwnPropertyDescriptor(e,r){if(!e.exclude.includes(r)&&r in e.props)return{enumerable:!0,configurable:!0,value:e.props[r]}},deleteProperty(e,r){return e.exclude.includes(r)||(e.exclude.push(r),N(e.version)),!0},has(e,r){return e.exclude.includes(r)?!1:r in e.props},ownKeys(e){return Reflect.ownKeys(e.props).filter(r=>!e.exclude.includes(r))}};function Oe(e,r){return new Proxy({props:e,exclude:r,special:{},version:fe(0),parent_effect:R},he)}const Se={get(e,r){let s=e.props.length;for(;s--;){let n=e.props[s];if(S(n)&&(n=n()),typeof n=="object"&&n!==null&&r in n)return n[r]}},set(e,r,s){let 
n=e.props.length;for(;n--;){let a=e.props[n];S(a)&&(a=a());const i=w(a,r);if(i&&i.set)return i.set(s),!0}return!1},getOwnPropertyDescriptor(e,r){let s=e.props.length;for(;s--;){let n=e.props[s];if(S(n)&&(n=n()),typeof n=="object"&&n!==null&&r in n){const a=w(n,r);return a&&!a.configurable&&(a.configurable=!0),a}}},has(e,r){if(r===Y||r===B)return!1;for(let s of e.props)if(S(s)&&(s=s()),s!=null&&r in s)return!0;return!1},ownKeys(e){const r=[];for(let s of e.props)if(S(s)&&(s=s()),!!s){for(const n in s)r.includes(n)||r.push(n);for(const n of Object.getOwnPropertySymbols(s))r.includes(n)||r.push(n)}return r}};function Te(...e){return new Proxy({props:e},Se)}function me(e,r,s,n){var a=!ce||(s&de)!==0,i=(s&le)!==0,l=(s&pe)!==0,d=n,p=!0,P=()=>(p&&(p=!1,d=l?oe(n):n),d),f;if(i){var I=Y in e||B in e;f=w(e,r)?.set??(I&&r in e?t=>e[r]=t:void 0)}var _,o=!1;i?[_,o]=be(()=>e[r]):_=e[r],_===void 0&&n!==void 0&&(_=P(),f&&(a&&se(),f(_)));var u;if(a?u=()=>{var t=e[r];return t===void 0?P():(p=!0,t)}:u=()=>{var t=e[r];return t!==void 0&&(d=void 0),t===void 0?d:t},a&&(s&M)===0)return u;if(f){var g=e.$$legacy;return function(t,v){return arguments.length>0?((!a||!v||g||o)&&f(v?u():t),t):u()}}var b=!1,c=((s&_e)!==0?ae:ie)(()=>(b=!1,u()));i&&m(c);var h=R;return function(t,v){if(arguments.length>0){const E=v?m(c):a&&i?te(t):t;return U(c,E),b=!0,d!==void 0&&(d=E),t}return ve&&b||(h.f&ue)!==0?c.v:m(c)}}export{ge as a,Te as b,Ie as i,Oe as l,me as p,Ee as s}; diff --git a/webapp/assets/_app/immutable/chunks/CkYhV7Br.js b/webapp/assets/_app/immutable/chunks/CkYhV7Br.js deleted file mode 100644 index 6a8184ad..00000000 --- a/webapp/assets/_app/immutable/chunks/CkYhV7Br.js +++ /dev/null @@ -1,4 +0,0 @@ -import"./DsnmJJEf.js";import{i as g}from"./zNh6Oe5P.js";import{p as k,l as x,s as d,m as w,n as y,a as J,f as m,j as z,w as j,k as L,g as c,r as B,t as C,c as n,d as E}from"./sWNKMed7.js";import{p as o,i as M}from"./Ccl3fNd2.js";import{c as f,s as N}from"./DVl4ZBgx.js";import{b as O}from"./CLagxtgo.js";var S=m('
                '),V=m('
                ');function I(p,r){k(r,!1);let t=o(r,"value",12,""),u=o(r,"placeholder",8,"{}"),b=o(r,"rows",8,4),i=o(r,"disabled",8,!1),a=w(!0);x(()=>y(t()),()=>{if(t().trim())try{JSON.parse(t()),d(a,!0)}catch{d(a,!1)}else d(a,!0)}),J(),g();var l=V(),e=z(l);j(e);var v=L(e,2);{var h=s=>{var _=S();n(s,_)};M(v,s=>{c(a)||s(h)})}B(l),C(()=>{f(e,"placeholder",u()),f(e,"rows",b()),e.disabled=i(),N(e,1,`w-full px-3 py-2 border rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 font-mono text-sm resize-none - ${c(a)?"border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-700 text-gray-900 dark:text-white":"border-red-300 dark:border-red-600 bg-red-50 dark:bg-red-900/20 text-red-900 dark:text-red-100"} - ${i()?"opacity-50 cursor-not-allowed":""} - `)}),O(e,t),n(p,l),E()}export{I as J}; diff --git a/webapp/assets/_app/immutable/chunks/Clig3Vwb.js b/webapp/assets/_app/immutable/chunks/Clig3Vwb.js deleted file mode 100644 index dc70a0e3..00000000 --- a/webapp/assets/_app/immutable/chunks/Clig3Vwb.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as ae}from"./zNh6Oe5P.js";import{p as se,E as re,l as M,n as ie,s as r,g as t,m as k,a as le,f as p,j as v,k as $,r as f,c as l,d as oe,B as T,b as E,z as V,D as q,t as F,v as N,u as ne}from"./sWNKMed7.js";import{p as R,i as m}from"./Ccl3fNd2.js";import{g as u,B as G}from"./DVl4ZBgx.js";import{t as y}from"./BZUCTtPY.js";import{e as de}from"./BZiHL9L3.js";var ce=p('
                Checking...
                '),ve=p('
                '),fe=p('
                Webhook installed
                ',1),ue=p('
                No webhook installed
                '),he=p('
                Webhook Status
                ');function _e(H,g){se(g,!1);const x=k();let h=R(g,"entityType",8),s=R(g,"entityId",8),j=R(g,"entityName",8),i=k(null),o=k(!1),b=k(!0);const A=re();async function _(){if(s())try{r(b,!0),h()==="repository"?r(i,await u.getRepositoryWebhookInfo(s())):r(i,await u.getOrganizationWebhookInfo(s()))}catch(e){e&&typeof e=="object"&&"response"in e&&e.response?.status===404?r(i,null):(console.warn("Failed to check webhook status:",e),r(i,null))}finally{r(b,!1)}}async function J(){if(s())try{r(o,!0),h()==="repository"?await u.installRepositoryWebhook(s()):await u.installOrganizationWebhook(s()),y.success("Webhook Installed",`Webhook for ${h()} ${j()} has been installed successfully.`),await _(),A("webhookStatusChanged",{installed:!0})}catch(e){y.error("Webhook Installation Failed",e instanceof Error?e.message:"Failed to install webhook.")}finally{r(o,!1)}}async function K(){if(s())try{r(o,!0),h()==="repository"?await u.uninstallRepositoryWebhook(s()):await u.uninstallOrganizationWebhook(s()),y.success("Webhook Uninstalled",`Webhook for ${h()} ${j()} has been uninstalled successfully.`),await _(),A("webhookStatusChanged",{installed:!1})}catch(e){y.error("Webhook Uninstall Failed",de(e))}finally{r(o,!1)}}M(()=>ie(s()),()=>{s()&&_()}),M(()=>t(i),()=>{r(x,t(i)&&t(i).active)}),le(),ae();var w=he(),O=v(w),P=v(O),W=v(P),D=$(v(W),2),Q=v(D);{var X=e=>{var d=ce();l(e,d)},Y=e=>{var d=T(),z=E(d);{var I=a=>{var n=fe(),B=$(E(n),2);{var c=C=>{var U=ve(),te=v(U);f(U),F(()=>N(te,`URL: ${t(i),ne(()=>t(i).url||"N/A")??""}`)),l(C,U)};m(B,C=>{t(i)&&C(c)})}l(a,n)},S=a=>{var n=ue();l(a,n)};m(z,a=>{t(x)?a(I):a(S,!1)},!0)}l(e,d)};m(Q,e=>{t(b)?e(X):e(Y,!1)})}f(D),f(W);var L=$(W,2),Z=v(L);{var ee=e=>{var d=T(),z=E(d);{var I=a=>{G(a,{variant:"danger",size:"sm",get disabled(){return t(o)},$$events:{click:K},children:(n,B)=>{V();var c=q();F(()=>N(c,t(o)?"Uninstalling...":"Uninstall")),l(n,c)},$$slots:{default:!0}})},S=a=>{G(a,{variant:"primary",size:"sm",get disabled(){return t(o)},$$events:{click:J},children:(n,B)=>{V();var c=q();F(()=>N(c,t(o)?"Installing...":"Install Webhook")),l(n,c)},$$slots:{default:!0}})};m(z,a=>{t(x)?a(I):a(S,!1)})}l(e,d)};m(Z,e=>{t(b)||e(ee)})}f(L),f(P),f(O),f(w),l(H,w),oe()}export{_e as W}; diff --git a/webapp/assets/_app/immutable/chunks/D4Caz1gY.js b/webapp/assets/_app/immutable/chunks/D4Caz1gY.js deleted file mode 100644 index 85ca9d43..00000000 --- a/webapp/assets/_app/immutable/chunks/D4Caz1gY.js +++ /dev/null @@ -1 +0,0 @@ -function r(t){return function(...e){var n=e[0];return n.preventDefault(),t?.apply(this,e)}}export{r as p}; diff --git a/webapp/assets/_app/immutable/chunks/DA-798Ko.js b/webapp/assets/_app/immutable/chunks/DA-798Ko.js deleted file mode 100644 index a8a114e7..00000000 --- a/webapp/assets/_app/immutable/chunks/DA-798Ko.js +++ /dev/null @@ -1 +0,0 @@ -import{I as p,J as l}from"./sWNKMed7.js";import{g as d}from"./DVl4ZBgx.js";import{w as r}from"./BuuPrWMc.js";const f={repositories:[],organizations:[],enterprises:[],pools:[],scalesets:[],credentials:[],endpoints:[],controllerInfo:null,loading:{repositories:!1,organizations:!1,enterprises:!1,pools:!1,scalesets:!1,credentials:!1,endpoints:!1,controllerInfo:!1},loaded:{repositories:!1,organizations:!1,enterprises:!1,pools:!1,scalesets:!1,credentials:!1,endpoints:!1,controllerInfo:!1},errorMessages:{repositories:"",organizations:"",enterprises:"",pools:"",scalesets:"",credentials:"",endpoints:"",controllerInfo:""}},a=p(f);class u{unsubscribers=[];loadingPromises=new Map;retryAttempts=new 
Map;MAX_RETRIES=3;RETRY_DELAY_MS=1e3;websocketStatusUnsubscriber=null;async loadResource(e,t=!1){if(this.loadingPromises.has(e))return this.loadingPromises.get(e);a.update(o=>({...o,loading:{...o.loading,[e]:!0},errorMessages:{...o.errorMessages,[e]:""}}));const s=this.attemptLoad(e);this.loadingPromises.set(e,s);try{const o=await s;return a.update(n=>({...n,[e]:o,loading:{...n.loading,[e]:!1},loaded:{...n.loaded,[e]:!0},errorMessages:{...n.errorMessages,[e]:""}})),this.retryAttempts.delete(e),t&&this.startBackgroundLoading(e),o}catch(o){const n=o instanceof Error?o.message:"Failed to load data";throw a.update(i=>({...i,loading:{...i.loading,[e]:!1},errorMessages:{...i.errorMessages,[e]:n}})),console.error(`Failed to load ${e}:`,o),o}finally{this.loadingPromises.delete(e)}}async attemptLoad(e){const t=(this.retryAttempts.get(e)||0)+1;this.retryAttempts.set(e,t);try{let s;switch(e){case"repositories":s=d.listRepositories();break;case"organizations":s=d.listOrganizations();break;case"enterprises":s=d.listEnterprises();break;case"pools":s=d.listAllPools();break;case"scalesets":s=d.listScaleSets();break;case"credentials":s=d.listAllCredentials();break;case"endpoints":s=d.listAllEndpoints();break;case"controllerInfo":s=d.getControllerInfo();break;default:throw new Error(`Unknown resource type: ${e}`)}return await s}catch(s){if(t<this.MAX_RETRIES){const o=this.RETRY_DELAY_MS;return await new Promise(n=>setTimeout(n,o)),this.attemptLoad(e)}else throw console.error(`All ${this.MAX_RETRIES} attempts failed for ${e}:`,s),s}}async startBackgroundLoading(e){const s=["repositories","organizations","enterprises","pools","scalesets","credentials","endpoints"].filter(o=>o!==e);for(const o of s)setTimeout(()=>{this.loadResource(o,!1).catch(n=>{console.warn(`Background loading failed for ${o}:`,n)})},100*s.indexOf(o))}retryResource(e){return this.retryAttempts.delete(e),this.loadResource(e,!0)}setupWebSocketSubscriptions(){this.cleanup();const e=[r.subscribeToEntity("repository",["create","update","delete"],this.handleRepositoryEvent.bind(this)),r.subscribeToEntity("organization",["create","update","delete"],this.handleOrganizationEvent.bind(this)),r.subscribeToEntity("enterprise",["create","update","delete"],this.handleEnterpriseEvent.bind(this)),r.subscribeToEntity("pool",["create","update","delete"],this.handlePoolEvent.bind(this)),r.subscribeToEntity("scaleset",["create","update","delete"],this.handleScaleSetEvent.bind(this)),r.subscribeToEntity("controller",["update"],this.handleControllerEvent.bind(this)),r.subscribeToEntity("github_credentials",["create","update","delete"],this.handleCredentialsEvent.bind(this)),r.subscribeToEntity("gitea_credentials",["create","update","delete"],this.handleCredentialsEvent.bind(this)),r.subscribeToEntity("github_endpoint",["create","update","delete"],this.handleEndpointEvent.bind(this))];this.unsubscribers=e,this.setupWebSocketStatusMonitoring()}setupWebSocketStatusMonitoring(){this.websocketStatusUnsubscriber&&this.websocketStatusUnsubscriber();let e=!1;this.websocketStatusUnsubscriber=r.subscribe(t=>{t.connected&&!e&&(console.log("[EagerCache] WebSocket connected - reinitializing cache"),this.initializeAllResources()),e=t.connected})}async initializeAllResources(){const t=["repositories","organizations","enterprises","pools","scalesets","credentials","endpoints","controllerInfo"].map(s=>this.loadResource(s,!0).catch(o=>{console.warn(`Failed to reload ${s} on WebSocket reconnect:`,o)}));await Promise.allSettled(t)}handleRepositoryEvent(e){a.update(t=>{if(!t.loaded.repositories)return t;const
s=[...t.repositories],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,repositories:s}})}handleOrganizationEvent(e){a.update(t=>{if(!t.loaded.organizations)return t;const s=[...t.organizations],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,organizations:s}})}handleEnterpriseEvent(e){a.update(t=>{if(!t.loaded.enterprises)return t;const s=[...t.enterprises],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,enterprises:s}})}handlePoolEvent(e){a.update(t=>{if(!t.loaded.pools)return t;const s=[...t.pools],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,pools:s}})}handleScaleSetEvent(e){a.update(t=>{if(!t.loaded.scalesets)return t;const s=[...t.scalesets],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,scalesets:s}})}handleCredentialsEvent(e){a.update(t=>{if(!t.loaded.credentials)return t;const s=[...t.credentials],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,credentials:s}})}handleEndpointEvent(e){a.update(t=>{if(!t.loaded.endpoints)return t;const s=[...t.endpoints],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.name===o.name);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.name:o,i=s.findIndex(c=>c.name===n);i!==-1&&s.splice(i,1)}return{...t,endpoints:s}})}cleanup(){this.unsubscribers.forEach(e=>e()),this.unsubscribers=[],this.websocketStatusUnsubscriber&&(this.websocketStatusUnsubscriber(),this.websocketStatusUnsubscriber=null)}shouldUseCache(){return l(r).connected}async getRepositories(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching repositories directly from API"),await d.listRepositories();const t=l(a);return t.loaded.repositories?t.repositories:this.loadResource("repositories",!0)}async getOrganizations(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching organizations directly from API"),await d.listOrganizations();const t=l(a);return t.loaded.organizations?t.organizations:this.loadResource("organizations",!0)}async getEnterprises(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching enterprises directly from API"),await d.listEnterprises();const t=l(a);return t.loaded.enterprises?t.enterprises:this.loadResource("enterprises",!0)}async 
getPools(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching pools directly from API"),await d.listAllPools();const t=l(a);return t.loaded.pools?t.pools:this.loadResource("pools",!0)}async getScaleSets(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching scalesets directly from API"),await d.listScaleSets();const t=l(a);return t.loaded.scalesets?t.scalesets:this.loadResource("scalesets",!0)}async getCredentials(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching credentials directly from API"),await d.listAllCredentials();const t=l(a);return t.loaded.credentials?t.credentials:this.loadResource("credentials",!0)}async getEndpoints(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching endpoints directly from API"),await d.listAllEndpoints();const t=l(a);return t.loaded.endpoints?t.endpoints:this.loadResource("endpoints",!0)}async getControllerInfo(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching controller info directly from API"),await d.getControllerInfo();const t=l(a);return t.loaded.controllerInfo?t.controllerInfo:this.loadResource("controllerInfo",!0)}handleControllerEvent(e){a.update(t=>{if(!t.loaded.controllerInfo)return t;const s=e.payload;return e.operation==="update"?{...t,controllerInfo:s}:t})}}const h=new u;typeof window<"u"&&h.setupWebSocketSubscriptions();export{h as a,a as e}; diff --git a/webapp/assets/_app/immutable/chunks/DAWfW-VQ.js b/webapp/assets/_app/immutable/chunks/DAWfW-VQ.js deleted file mode 100644 index 0508ff54..00000000 --- a/webapp/assets/_app/immutable/chunks/DAWfW-VQ.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as R}from"./zNh6Oe5P.js";import{p as q,l as w,a as A,f as x,t as v,c as k,d as B,k as D,j as u,s as _,m as y,r as f,n as m,u as h,g as d,v as U}from"./sWNKMed7.js";import{p as o,i as F}from"./Ccl3fNd2.js";import{c as g,s as G,d as r}from"./DVl4ZBgx.js";var H=x('
                '),J=x('');function V(b,n){q(n,!1);const i=y(),p=y();let e=o(n,"item",8),s=o(n,"entityType",8,"repository"),E=o(n,"showOwner",8,!1),I=o(n,"showId",8,!1),z=o(n,"fontMono",8,!1);function C(){if(!e())return"Unknown";switch(s()){case"repository":return E()?`${e().owner||"Unknown"}/${e().name||"Unknown"}`:e().name||"Unknown";case"organization":case"enterprise":return e().name||"Unknown";case"pool":return I()?e().id||"Unknown":e().name||"Unknown";case"scaleset":return e().name||"Unknown";case"instance":return e().name||"Unknown";default:return e().name||e().id||"Unknown"}}function M(){if(!e())return"#";let t;switch(s()){case"instance":t=e().name;break;default:t=e().id||e().name;break}if(!t)return"#";switch(s()){case"repository":return r(`/repositories/${t}`);case"organization":return r(`/organizations/${t}`);case"enterprise":return r(`/enterprises/${t}`);case"pool":return r(`/pools/${t}`);case"scaleset":return r(`/scalesets/${t}`);case"instance":return r(`/instances/${encodeURIComponent(t)}`);default:return"#"}}w(()=>{},()=>{_(i,C())}),w(()=>{},()=>{_(p,M())}),A(),R();var c=J(),a=u(c),N=u(a,!0);f(a);var O=D(a);{var T=t=>{var l=H(),j=u(l,!0);f(l),v(()=>U(j,(m(e()),h(()=>e().provider_id)))),k(t,l)};F(O,t=>{m(s()),m(e()),h(()=>s()==="instance"&&e()?.provider_id)&&t(T)})}f(c),v(()=>{g(a,"href",d(p)),G(a,1,`block w-full truncate text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 ${z()?"font-mono":""}`),g(a,"title",d(i)),U(N,d(i))}),k(b,c),B()}export{V as E}; diff --git a/webapp/assets/_app/immutable/chunks/DCYYzf48.js b/webapp/assets/_app/immutable/chunks/DCYYzf48.js deleted file mode 100644 index 9ac73c5c..00000000 --- a/webapp/assets/_app/immutable/chunks/DCYYzf48.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as ke}from"./zNh6Oe5P.js";import{V as ut,aU as gt,aV as ft,M as ht,O as mt,P as pt,Q as kt,R as xt,at as _t,K as Xe,L as yt,U as bt,T as wt,a8 as Mt,p as fe,l as $,s as ee,m as te,n as u,a as Le,C as ye,j as s,g as e,r as n,t as R,c as i,d as he,E as be,f as L,e as Ge,u as v,k as j,v as O,z as Se,D as Te,B as Q,b as q,x as Pt,q as X}from"./sWNKMed7.js";import{p as o,l as Ye,i as I,b as Ct}from"./Ccl3fNd2.js";import{e as de,i as ge}from"./BuuPrWMc.js";import{h as Ae,s as pe,i as jt,f as zt,B as Ce,r as Tt,c as Be,b as Lt,d as Ht,e as $e,j as It}from"./DVl4ZBgx.js";import{c as St}from"./CCYOsezl.js";import{b as At}from"./CLagxtgo.js";import{B as Bt,g as et,b as Et}from"./cjRLNre3.js";function tt(S,r,g){Xe&&yt();var a=S,t=_t,p,h,m=null,f=ut()?gt:ft;function M(){p&&bt(p),m!==null&&(m.lastChild.remove(),a.before(m),m=null),p=h}ht(()=>{if(f(t,t=r())){var k=a,A=xt();A&&(m=document.createDocumentFragment(),m.append(k=mt())),h=pt(()=>g(k)),A?kt.add_callback(M):M()}}),Xe&&(a=wt)}function Fe(S,r){var g=S.$$events?.[r.type],a=Mt(g)?g.slice():g==null?[]:[g];for(var t of a)t.call(this,r)}var Dt=ye('');function Vt(S,r){fe(r,!1);const g=te();let a=o(r,"name",8),t=o(r,"class",8,"h-5 w-5");const p={plus:'',edit:'',delete:'',view:'',close:'',check:'',x:'',"chevron-left":'',"chevron-right":'',"chevron-down":'',"chevron-up":'',search:'',refresh:'',menu:'',settings:'',"check-circle":'',"x-circle":'',"exclamation-circle":'',"information-circle":'',loading:'',sun:'',moon:'',document:'',folder:''};$(()=>u(a()),()=>{ee(g,p[a()]||"")}),Le();var h=Dt(),m=s(h);Ae(m,()=>e(g),!0),n(h),R(()=>pe(h,0,`${t()}`)),i(S,h),he()}var Nt=L('');function rt(S,r){const 
g=Ye(r,["children","$$slots","$$events","$$legacy"]),a=Ye(g,["action","disabled","title","ariaLabel","size"]);fe(r,!1);const t=te(),p=te(),h=te(),m=te(),f=te(),M=te(),k=te(),A=te(),U=te(),V=be();let P=o(r,"action",8,"edit"),Z=o(r,"disabled",8,!1),B=o(r,"title",8,""),x=o(r,"ariaLabel",8,""),H=o(r,"size",8,"md");function D(){Z()||V("click")}$(()=>{},()=>{ee(t,"transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 dark:focus:ring-offset-gray-900 cursor-pointer disabled:cursor-not-allowed disabled:opacity-50")}),$(()=>u(H()),()=>{ee(p,{sm:"p-1",md:"p-2"}[H()])}),$(()=>u(P()),()=>{ee(h,{edit:"text-indigo-600 dark:text-indigo-400 hover:text-indigo-900 dark:hover:text-indigo-300 focus:ring-indigo-500",delete:"text-red-600 dark:text-red-400 hover:text-red-900 dark:hover:text-red-300 focus:ring-red-500",view:"text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-gray-300 focus:ring-gray-500",add:"text-green-600 dark:text-green-400 hover:text-green-900 dark:hover:text-green-300 focus:ring-green-500"}[P()])}),$(()=>u(H()),()=>{ee(m,H()==="sm"?"h-4 w-4":"h-5 w-5")}),$(()=>(e(t),e(p),e(h)),()=>{ee(f,[e(t),e(p),e(h)].join(" "))}),$(()=>{},()=>{ee(M,{edit:'',delete:'',view:'',add:''})}),$(()=>{},()=>{ee(k,{edit:"Edit",delete:"Delete",view:"View",add:"Add"})}),$(()=>(u(B()),e(k),u(P())),()=>{ee(A,B()||e(k)[P()])}),$(()=>(u(x()),e(k),u(P())),()=>{ee(U,x()||`${e(k)[P()]} item`)}),Le(),ke();var F=Nt();jt(F,()=>({type:"button",class:e(f),disabled:Z(),title:e(A),"aria-label":e(U),...a}));var J=s(F),l=s(J);Ae(l,()=>(e(M),u(P()),v(()=>e(M)[P()])),!0),n(J),n(F),R(()=>pe(J,0,zt(e(m)))),Ge("click",F,D),i(S,F),he()}var Rt=L('

                ');function Ut(S,r){let g=o(r,"message",8,"Loading...");var a=Rt(),t=j(s(a),2),p=s(t,!0);n(t),n(a),R(()=>O(p,g())),i(S,a)}var qt=L('
                '),Ot=L('

                ');function Ft(S,r){let g=o(r,"title",8,"Error"),a=o(r,"message",8),t=o(r,"showRetry",8,!1),p=o(r,"onRetry",8,void 0);var h=Ot(),m=s(h),f=s(m),M=j(s(f),2),k=s(M),A=s(k,!0);n(k);var U=j(k,2),V=s(U,!0);n(U);var P=j(U,2);{var Z=B=>{var x=qt(),H=s(x);Ce(H,{variant:"secondary",size:"sm",icon:"",class:"text-red-700 dark:text-red-200 bg-red-100 dark:bg-red-800 hover:bg-red-200 dark:hover:bg-red-700 focus:outline-none focus:bg-red-200 dark:focus:bg-red-700",$$events:{click(...D){p()?.apply(this,D)}},children:(D,F)=>{Se();var J=Te("Retry");i(D,J)},$$slots:{default:!0}}),n(x),i(B,x)};I(P,B=>{t()&&p()&&B(Z)})}n(M),n(f),n(m),n(h),R(()=>{O(A,g()),O(V,a())}),i(S,h)}var Gt=ye(''),Kt=ye(''),Qt=ye(''),Zt=ye(''),Jt=ye(''),Wt=ye(''),Xt=L('

                ');function Yt(S,r){let g=o(r,"title",8),a=o(r,"message",8),t=o(r,"iconType",8,"document");var p=Xt(),h=s(p);{var m=V=>{var P=Gt();i(V,P)},f=V=>{var P=Q(),Z=q(P);{var B=H=>{var D=Kt();i(H,D)},x=H=>{var D=Q(),F=q(D);{var J=d=>{var c=Qt();i(d,c)},l=d=>{var c=Q(),y=q(c);{var N=w=>{var E=Zt();i(w,E)},W=w=>{var E=Q(),z=q(E);{var C=T=>{var G=Jt();i(T,G)},_=T=>{var G=Q(),Y=q(G);{var re=ae=>{var ce=Wt();i(ae,ce)};I(Y,ae=>{t()==="settings"&&ae(re)},!0)}i(T,G)};I(z,T=>{t()==="key"?T(C):T(_,!1)},!0)}i(w,E)};I(y,w=>{t()==="cog"?w(N):w(W,!1)},!0)}i(d,c)};I(F,d=>{t()==="users"?d(J):d(l,!1)},!0)}i(H,D)};I(Z,H=>{t()==="building"?H(B):H(x,!1)},!0)}i(V,P)};I(h,V=>{t()==="document"?V(m):V(f,!1)})}var M=j(h,2),k=s(M,!0);n(M);var A=j(M,2),U=s(A,!0);n(A),n(p),R(()=>{O(k,g()),O(U,a())}),i(S,p)}var $t=L('
                ');function er(S,r){fe(r,!1);let g=o(r,"value",12,""),a=o(r,"placeholder",8,"Search..."),t=o(r,"disabled",8,!1);const p=be();function h(){p("input",g())}ke();var m=$t(),f=s(m),M=s(f);Vt(M,{name:"search",class:"h-5 w-5 text-gray-400"}),n(f);var k=j(f,2);Tt(k),n(m),R(()=>{Be(k,"placeholder",a()),k.disabled=t()}),At(k,g),Ge("input",k,h),i(S,m),he()}var tr=L(""),rr=L('
                '),ar=L('
                ');function nr(S,r){fe(r,!1);let g=o(r,"searchTerm",12,""),a=o(r,"perPage",12,25),t=o(r,"placeholder",8,"Search..."),p=o(r,"showPerPageSelector",8,!0),h=o(r,"perPageOptions",24,()=>[25,50,100]);const m=be();function f(){m("search",{term:g()})}function M(){m("perPageChange",{perPage:a()})}ke();var k=ar(),A=s(k),U=s(A),V=s(U),P=j(s(V),2);er(P,{get placeholder(){return t()},get value(){return g()},set value(x){g(x)},$$events:{input:f},$$legacy:!0}),n(V),n(U);var Z=j(U,2);{var B=x=>{var H=rr(),D=s(H),F=j(s(D),2);R(()=>{a(),Pt(()=>{h()})}),de(F,5,h,ge,(J,l)=>{var d=tr(),c=s(d,!0);n(d);var y={};R(()=>{O(c,e(l)),y!==(y=e(l))&&(d.value=(d.__value=e(l))??"")}),i(J,d)}),n(F),n(D),n(H),Lt(F,a),Ge("change",F,M),i(x,H)};I(Z,x=>{p()&&x(B)})}n(A),n(k),i(S,k),he()}var ir=L('Showing to of ',1),or=L('
                ');function sr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"currentPage",8,1),p=o(r,"totalPages",8,1),h=o(r,"perPage",8,25),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results");const M=be();function k(P){P>=1&&P<=p()&&P!==t()&&M("pageChange",{page:P})}$(()=>(u(m()),u(t()),u(h())),()=>{ee(g,m()===0?0:(t()-1)*h()+1)}),$(()=>(u(t()),u(h()),u(m())),()=>{ee(a,Math.min(t()*h(),m()))}),Le(),ke();var A=Q(),U=q(A);{var V=P=>{var Z=or(),B=s(Z),x=s(B);{let z=X(()=>t()===1);Ce(x,{variant:"secondary",get disabled(){return e(z)},$$events:{click:()=>k(t()-1)},children:(C,_)=>{Se();var T=Te("Previous");i(C,T)},$$slots:{default:!0}})}var H=j(x,2);{let z=X(()=>t()===p());Ce(H,{variant:"secondary",get disabled(){return e(z)},class:"ml-3",$$events:{click:()=>k(t()+1)},children:(C,_)=>{Se();var T=Te("Next");i(C,T)},$$slots:{default:!0}})}n(B);var D=j(B,2),F=s(D),J=s(F),l=s(J);{var d=z=>{var C=Te();R(()=>O(C,`No ${f()??""}`)),i(z,C)},c=z=>{var C=ir(),_=j(q(C)),T=s(_,!0);n(_);var G=j(_,2),Y=s(G,!0);n(G);var re=j(G,2),ae=s(re,!0);n(re);var ce=j(re);R(()=>{O(T,e(g)),O(Y,e(a)),O(ae,m()),O(ce,` ${f()??""}`)}),i(z,C)};I(l,z=>{m()===0?z(d):z(c,!1)})}n(J),n(F);var y=j(F,2),N=s(y),W=s(N);{let z=X(()=>t()===1);Ce(W,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-r-none","aria-label":"Previous page",icon:"",$$events:{click:()=>k(t()-1)}})}var w=j(W,2);de(w,1,()=>(u(p()),v(()=>Array(p()))),ge,(z,C,_)=>{const T=X(()=>_+1);{let G=X(()=>e(T)===t()?"primary":"secondary");Ce(z,{get variant(){return e(G)},size:"sm",class:"rounded-none border-l-0 first:border-l first:rounded-l-md",$$events:{click:()=>k(e(T))},children:(Y,re)=>{Se();var ae=Te();R(()=>O(ae,e(T))),i(Y,ae)},$$slots:{default:!0}})}});var E=j(w,2);{let z=X(()=>t()===p());Ce(E,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-l-none","aria-label":"Next page",icon:"",$$events:{click:()=>k(t()+1)}})}n(N),n(y),n(D),n(Z),i(P,Z)};I(U,P=>{p()>1&&P(V)})}i(S,A),he()}var lr=L('

                '),dr=L('

                '),cr=L('

                '),vr=L('

                '),ur=L('
                '),gr=L('
                '),fr=L('
                '),hr=L(" "),mr=L('
                '),pr=L('
                ');function kr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"config",8);function p(){if(!a())return"Unknown";const{field:l,useId:d,showOwner:c}=t().primaryText,y=a()[l];return d&&y?`${y.slice(0,8)}...`:c&&a().owner&&a().name?`${a().owner}/${a().name}`:y||"Unknown"}function h(){if(!t().secondaryText)return"";const{field:l,computedValue:d}=t().secondaryText;return d!==void 0?typeof d=="function"?d(a()):d:a()?.[l]||""}function m(){if(!t().primaryText.href||!a())return"#";let l=t().primaryText.href;return l=l.replace("{id}",a().id||""),l=l.replace("{name}",encodeURIComponent(a().name||"")),Ht(l)}function f(l){if(!a())return;const d=t().actions?.find(c=>c.type===l);d&&d.handler(a()),l==="edit"?g("edit",{item:a()}):l==="delete"?g("delete",{item:a()}):g("action",{type:l,item:a()})}function M(l){switch(l.type){case"status":if(t().entityType==="instance"){const c=a()?.[l.field]||"unknown";let y="neutral",N=c.charAt(0).toUpperCase()+c.slice(1);return l.field==="status"?y=c==="running"?"success":c==="pending"||c==="creating"?"info":c==="failed"||c==="error"?"error":"neutral":l.field==="runner_status"&&(y=c==="idle"?"info":c==="active"||c==="running"?"success":c==="failed"||c==="error"?"error":"neutral"),{variant:y,text:N}}return{variant:"neutral",text:a()?.[l.field]||"Unknown"};case"forge":return{variant:"neutral",text:a()?.[l.field]||"unknown"};case"auth":const d=a()?.[l.field]||"pat";return{variant:d==="pat"?"success":"info",text:d.toUpperCase()};case"custom":if(typeof l.value=="function"){const c=l.value(a());return{variant:c?.variant||"neutral",text:c?.text||""}}return{variant:l.value?.variant||"neutral",text:l.value?.text||""};default:return{variant:"neutral",text:""}}}ke();var k=pr(),A=s(k),U=s(A);{var V=l=>{var d=dr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=lr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R((w,E)=>{Be(d,"href",w),pe(c,1,`text-sm font-medium text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 truncate${u(t()),v(()=>t().primaryText.isMonospace?" 
font-mono":"")??""}`),O(y,E)},[()=>v(m),()=>v(p)]),i(l,d)},P=l=>{var d=vr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=cr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R(w=>O(y,w),[()=>v(p)]),i(l,d)};I(U,l=>{u(t()),v(()=>t().primaryText.isClickable)?l(V):l(P,!1)})}var Z=j(U,2);{var B=l=>{var d=fr(),c=s(d);{var y=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().customInfo)),ge,(C,_)=>{const T=X(()=>(e(_),u(a()),v(()=>typeof e(_).icon=="function"?e(_).icon(a()):e(_).icon))),G=X(()=>(e(_),u(a()),v(()=>typeof e(_).text=="function"?e(_).text(a()):e(_).text)));var Y=ur(),re=s(Y);{var ae=je=>{var He=Q(),De=q(He);Ae(De,()=>e(T)),i(je,He)};I(re,je=>{e(T)&&je(ae)})}var ce=j(re,2),Ee=s(ce,!0);n(ce),n(Y),R(()=>O(Ee,e(G))),i(C,Y)}),i(w,E)};I(c,w=>{u(t()),v(()=>t().customInfo)&&w(y)})}var N=j(c,2);{var W=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().badges.filter(C=>C.type==="forge"))),ge,(C,_)=>{var T=gr(),G=s(T);Ae(G,()=>(u(et),e(_),u(a()),v(()=>et(e(_).field?a()?.[e(_).field]||"unknown":a()?.endpoint?.endpoint_type||"unknown"))));var Y=j(G,2),re=s(Y,!0);n(Y),n(T),R(()=>O(re,(u(a()),v(()=>a()?.endpoint?.name||"Unknown")))),i(C,T)}),i(w,E)};I(N,w=>{u(t()),v(()=>t().badges)&&w(W)})}n(d),i(l,d)};I(Z,l=>{u(t()),v(()=>t().customInfo||t().badges?.some(d=>d.type==="forge"))&&l(B)})}n(A);var x=j(A,2),H=s(x);{var D=l=>{var d=Q(),c=q(d);de(c,1,()=>(u(t()),v(()=>t().badges.filter(y=>y.type!=="forge"))),ge,(y,N)=>{var W=Q(),w=q(W);{var E=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));var T=hr(),G=s(T,!0);n(T),R(()=>{pe(T,1,`inline-flex items-center rounded-full px-2 py-1 text-xs font-medium ring-1 ring-inset ${u(e(_)),v(()=>e(_).variant==="success"?"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-900/50 dark:text-green-300 dark:ring-green-400/20":e(_).variant==="info"?"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-900/50 dark:text-blue-300 dark:ring-blue-400/20":e(_).variant==="error"?"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-900/50 dark:text-red-300 dark:ring-red-400/20":"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-900/50 dark:text-gray-300 dark:ring-gray-400/20")??""}`),O(G,(u(e(_)),v(()=>e(_).text)))}),i(C,T)},z=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));Bt(C,{get variant(){return u(e(_)),v(()=>e(_).variant)},get text(){return u(e(_)),v(()=>e(_).text)}})};I(w,C=>{e(N),v(()=>e(N).type==="status")?C(E):C(z,!1)})}i(y,W)}),i(l,d)};I(H,l=>{u(t()),v(()=>t().badges)&&l(D)})}var F=j(H,2);{var J=l=>{var d=mr();de(d,5,()=>(u(t()),v(()=>t().actions)),ge,(c,y)=>{{let N=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`))),W=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`)));rt(c,{get action(){return e(y),v(()=>e(y).type)},size:"sm",get title(){return e(N)},get ariaLabel(){return e(W)},$$events:{click:()=>f(e(y).type)}})}}),n(d),i(l,d)};I(F,l=>{u(t()),v(()=>t().actions)&&l(J)})}n(x),n(k),i(S,k),he()}var xr=L('
                '),_r=L('
                '),yr=L("
                "),br=L("
                "),wr=L(' ',1),Mr=L('
                ');function Dr(S,r){fe(r,!1);const g=te();let a=o(r,"columns",24,()=>[]),t=o(r,"data",24,()=>[]),p=o(r,"loading",8,!1),h=o(r,"error",8,""),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results"),M=o(r,"searchTerm",12,""),k=o(r,"searchPlaceholder",8,"Search..."),A=o(r,"showSearch",8,!0),U=o(r,"currentPage",8,1),V=o(r,"perPage",12,25),P=o(r,"totalPages",8,1),Z=o(r,"showPagination",8,!0),B=o(r,"showPerPageSelector",8,!0),x=o(r,"emptyTitle",8,"No items found"),H=o(r,"emptyMessage",8,""),D=o(r,"emptyIconType",8,"document"),F=o(r,"errorTitle",8,"Error loading data"),J=o(r,"showRetry",8,!1),l=o(r,"showMobileCards",8,!0),d=o(r,"mobileCardConfig",8,null);const c=be();function y(b){c("search",b.detail)}function N(b){c("pageChange",b.detail)}function W(b){c("perPageChange",b.detail)}function w(){c("retry")}function E(b){c("edit",b.detail)}function z(b){c("delete",b.detail)}function C(b){c("action",b.detail)}function _(b){const ve="px-6 py-4 text-sm",Ve=b.align==="right"?"text-right":b.align==="center"?"text-center":"text-left",Ne=b.key==="actions"?"font-medium":"text-gray-900 dark:text-white",Re=b.flexible?"min-w-0":"";return`${ve} ${Ve} ${Ne} ${Re}`.trim()}function T(){return a().map(b=>b.flexible?`${b.flexRatio||1}fr`:"auto").join(" ")}$(()=>(u(H()),u(M()),u(f())),()=>{ee(g,H()||(M()?`No items found matching "${M()}"`:`No ${f()} found`))}),Le(),ke();var G=Mr(),Y=s(G);{var re=b=>{nr(b,{get placeholder(){return k()},get showPerPageSelector(){return B()},get searchTerm(){return M()},set searchTerm(ve){M(ve)},get perPage(){return V()},set perPage(ve){V(ve)},$$events:{search:y,perPageChange:W},$$legacy:!0})};I(Y,b=>{A()&&b(re)})}var ae=j(Y,2),ce=s(ae);{var Ee=b=>{Ut(b,{get message(){return`Loading ${f()??""}...`}})},je=b=>{var ve=Q(),Ve=q(ve);{var Ne=we=>{{let Ie=X(()=>J()?w:void 0);Ft(we,{get title(){return F()},get message(){return h()},get showRetry(){return J()},get onRetry(){return e(Ie)}})}},Re=we=>{var Ie=Q(),at=q(Ie);{var nt=Me=>{Yt(Me,{get title(){return x()},get message(){return e(g)},get iconType(){return D()}})},it=Me=>{var Ke=wr(),Qe=q(Ke);{var ot=oe=>{var K=_r();de(K,7,t,(le,ne)=>le.id||le.name||ne,(le,ne,qe)=>{var ze=xr(),ie=s(ze);{var Pe=me=>{var xe=Q(),se=q(xe);tt(se,()=>(e(ne),v(()=>`${e(ne).id||e(ne).name}-${e(ne).updated_at}-mobile`)),_e=>{kr(_e,{get item(){return e(ne)},get config(){return d()},$$events:{edit(ue){Fe.call(this,r,ue)},delete(ue){Fe.call(this,r,ue)},action(ue){Fe.call(this,r,ue)}}})}),i(me,xe)},Oe=me=>{var xe=Q(),se=q(xe);$e(se,r,"mobile-card",{get item(){return e(ne)},get index(){return e(qe)}}),i(me,xe)};I(ie,me=>{d()?me(Pe):me(Oe,!1)})}n(ze),i(le,ze)}),n(K),i(oe,K)};I(Qe,oe=>{l()&&oe(ot)})}var Ze=j(Qe,2),Ue=s(Ze),Je=s(Ue);de(Je,1,a,ge,(oe,K)=>{var le=yr(),ne=s(le,!0);n(le),R(()=>{pe(le,1,`px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-300 uppercase tracking-wider bg-gray-50 dark:bg-gray-700 border-b border-gray-200 dark:border-gray-600 ${e(K),v(()=>e(K).align==="right"?"text-right":e(K).align==="center"?"text-center":"text-left")??""}`),O(ne,(e(K),v(()=>e(K).title)))}),i(oe,le)});var st=j(Je,2);de(st,3,t,(oe,K)=>oe.id||oe.name||K,(oe,K,le)=>{var ne=Q(),qe=q(ne);de(qe,1,a,ge,(ze,ie)=>{var Pe=br(),Oe=s(Pe);{var me=se=>{var _e=Q(),ue=q(_e);tt(ue,()=>(e(K),e(ie),v(()=>`${e(K).id||e(K).name}-${e(K).updated_at}-${e(ie).key}`)),lt=>{var We=Q(),dt=q(We);St(dt,()=>e(ie).cellComponent,(ct,vt)=>{vt(ct,Ct({get item(){return e(K)}},()=>e(ie).cellProps,{$$events:{edit:E,delete:z,action:C}}))}),i(lt,We)}),i(se,_e)},xe=se=>{var 
_e=Q(),ue=q(_e);$e(ue,r,"cell",{get item(){return e(K)},get column(){return e(ie)},get index(){return e(le)},get value(){return e(K),e(ie),v(()=>e(K)[e(ie).key])}}),i(se,_e)};I(Oe,se=>{e(ie),v(()=>e(ie).cellComponent)?se(me):se(xe,!1)})}n(Pe),R(se=>pe(Pe,1,`${se??""} border-b border-gray-200 dark:border-gray-700`),[()=>(e(ie),v(()=>_(e(ie))))]),i(ze,Pe)}),i(oe,ne)}),n(Ue),n(Ze),R(oe=>It(Ue,`grid-template-columns: ${oe??""}`),[()=>v(T)]),i(Me,Ke)};I(at,Me=>{u(t()),v(()=>t().length===0)?Me(nt):Me(it,!1)},!0)}i(we,Ie)};I(Ve,we=>{h()?we(Ne):we(Re,!1)},!0)}i(b,ve)};I(ce,b=>{p()?b(Ee):b(je,!1)})}var He=j(ce,2);{var De=b=>{sr(b,{get currentPage(){return U()},get totalPages(){return P()},get perPage(){return V()},get totalItems(){return m()},get itemName(){return f()},$$events:{pageChange:N}})};I(He,b=>{u(Z()),u(p()),u(h()),u(t()),v(()=>Z()&&!p()&&!h()&&t().length>0)&&b(De)})}n(ae),n(G),i(S,G),he()}var Pr=L('
                ');function Vr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"actions",24,()=>[{type:"edit",title:"Edit",ariaLabel:"Edit item",action:"edit"},{type:"delete",title:"Delete",ariaLabel:"Delete item",action:"delete"}]);function p(m){a()&&(m==="edit"?g("edit",{item:a()}):m==="delete"?g("delete",{item:a()}):g("action",{type:m,item:a()}))}ke();var h=Pr();de(h,5,t,ge,(m,f)=>{{let M=X(()=>(e(f),v(()=>e(f).action||(e(f).type==="edit"?"edit":e(f).type==="delete"?"delete":"view")))),k=X(()=>(e(f),v(()=>e(f).title||(e(f).type==="edit"?"Edit":e(f).type==="delete"?"Delete":e(f).label)))),A=X(()=>(e(f),v(()=>e(f).ariaLabel||(e(f).type==="edit"?"Edit item":e(f).type==="delete"?"Delete item":e(f).label))));rt(m,{get action(){return e(M)},get title(){return e(k)},get ariaLabel(){return e(A)},$$events:{click:()=>p(e(f).type)}})}}),n(h),i(S,h),he()}var Cr=L(" "),jr=L(" ");function Nr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"item",8),p=o(r,"field",8),h=o(r,"type",8,"text"),m=o(r,"truncateLength",8,50),f=o(r,"showTitle",8,!1);function M(){return t()&&p().split(".").reduce((B,x)=>B?.[x],t())||""}function k(){return h()==="date"?Et(e(g)):h()==="truncated"&&e(g).length>m()?`${e(g).slice(0,m())}...`:e(g)}function A(){switch(h()){case"code":return"inline-block max-w-full truncate bg-gray-100 dark:bg-gray-700 px-2 py-1 rounded text-xs font-mono";case"description":return"block w-full truncate text-sm text-gray-500 dark:text-gray-300";case"date":return"block w-full truncate text-sm text-gray-900 dark:text-white font-mono";default:return"block w-full truncate text-sm text-gray-900 dark:text-white"}}$(()=>{},()=>{ee(g,M())}),$(()=>{},()=>{ee(a,k())}),Le(),ke();var U=Q(),V=q(U);{var P=B=>{var x=Cr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)},Z=B=>{var x=jr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)};I(V,B=>{h()==="code"?B(P):B(Z,!1)})}i(S,U),he()}export{rt as A,Dr as D,Nr as G,Vr as a,tt as k}; diff --git a/webapp/assets/_app/immutable/chunks/DN14Fk2Y.js b/webapp/assets/_app/immutable/chunks/DN14Fk2Y.js deleted file mode 100644 index c4d4b1f4..00000000 --- a/webapp/assets/_app/immutable/chunks/DN14Fk2Y.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as u}from"./zNh6Oe5P.js";import{p as v,E as m,f as h,j as r,r as d,e as t,c as k,d as g}from"./sWNKMed7.js";import{e as b}from"./DVl4ZBgx.js";var w=h('');function j(s,i){v(i,!1);const l=m();function n(){l("close")}function c(o){o.stopPropagation()}function f(o){o.key==="Escape"&&l("close")}u();var a=w(),e=r(a),p=r(e);b(p,i,"default",{}),d(e),d(a),t("click",e,c),t("click",a,n),t("keydown",a,f),k(s,a),g()}export{j as M}; diff --git a/webapp/assets/_app/immutable/chunks/DVl4ZBgx.js b/webapp/assets/_app/immutable/chunks/DVl4ZBgx.js deleted file mode 100644 index 26a66ec9..00000000 --- a/webapp/assets/_app/immutable/chunks/DVl4ZBgx.js +++ /dev/null @@ -1,7 +0,0 @@ -import{b as Tr,r as Br}from"./C6O4o7G1.js";import"./DsnmJJEf.js";import{i as Dr}from"./zNh6Oe5P.js";import{t as Ie,K as Y,L as Ke,aj as Lr,aW as jr,T as ht,a3 as kr,af as zr,aX as Fr,aY as Gr,aZ as ut,Y as _r,a_ as qr,Z as we,M as Jt,ae as Xe,P as Kt,F as ot,a8 as Hr,a$ as $r,b0 as Nr,aw as Wr,W as Qr,b1 as Mr,b2 as Jr,b3 as Kr,b4 as Xr,g as k,b5 as Yr,b6 as Zr,a2 as Ot,b7 as es,b8 as ts,b9 as rs,ba as ss,bb as as,at as os,bc as ns,bd as ls,be as is,p as cs,E as ps,l as ee,a as ds,f as hs,e as us,c as ue,d as Os,n as 
oe,m as te,s as re,j as qe,k as Pt,r as He,C as nt,B as Ps,b as bs}from"./sWNKMed7.js";import{l as bt,p as se,i as $e}from"./Ccl3fNd2.js";function mt(t,e,r=!1,s=!1,o=!1){var a=t,n="";Ie(()=>{var l=Lr;if(n===(n=e()??"")){Y&&Ke();return}if(l.nodes_start!==null&&(jr(l.nodes_start,l.nodes_end),l.nodes_start=l.nodes_end=null),n!==""){if(Y){ht.data;for(var i=Ke(),c=i;i!==null&&(i.nodeType!==kr||i.data!=="");)c=i,i=zr(i);if(i===null)throw Fr(),Gr;ut(ht,c),a=_r(i);return}var p=n+"";r?p=`${p}`:s&&(p=`${p}`);var R=qr(p);if((r||s)&&(R=we(R)),ut(we(R),R.lastChild),r||s)for(;we(R);)a.before(we(R));else a.before(R)}})}function ms(t,e,r,s,o){Y&&Ke();var a=e.$$slots?.[r],n=!1;a===!0&&(a=e[r==="default"?"children":r],n=!0),a===void 0||a(t,n?()=>s:s)}function Vs(t,e){var r=void 0,s;Jt(()=>{r!==(r=e())&&(s&&(Xe(s),s=null),r&&(s=Kt(()=>{ot(()=>r(t))})))})}function Xt(t){var e,r,s="";if(typeof t=="string"||typeof t=="number")s+=t;else if(typeof t=="object")if(Array.isArray(t)){var o=t.length;for(e=0;e=0;){var l=n+a;(n===0||Vt.includes(s[n-1]))&&(l===s.length||Vt.includes(s[l]))?s=(n===0?"":s.substring(0,n))+s.substring(l+1):n=l}}return s===""?null:s}function St(t,e=!1){var r=e?" !important;":";",s="";for(var o in t){var a=t[o];a!=null&&a!==""&&(s+=" "+o+": "+a+r)}return s}function Ne(t){return t[0]!=="-"||t[1]!=="-"?t.toLowerCase():t}function fs(t,e){if(e){var r="",s,o;if(Array.isArray(e)?(s=e[0],o=e[1]):s=e,t){t=String(t).replaceAll(/\s*\/\*.*?\*\/\s*/g,"").trim();var a=!1,n=0,l=!1,i=[];s&&i.push(...Object.keys(s).map(Ne)),o&&i.push(...Object.keys(o).map(Ne));var c=0,p=-1;const y=t.length;for(var R=0;R{Ce(t,t.__value)});e.observe(t,{childList:!0,subtree:!0,attributes:!0,attributeFilter:["value"]}),Wr(()=>{e.disconnect()})}function Do(t,e,r=e){var s=!0;Qr(t,"change",o=>{var a=o?"[selected]":":checked",n;if(t.multiple)n=[].map.call(t.querySelectorAll(a),me);else{var l=t.querySelector(a)??t.querySelector("option:not([disabled])");n=l&&me(l)}r(n)}),ot(()=>{var o=e();if(Ce(t,o,s),s&&o===void 0){var a=t.querySelector(":checked");a!==null&&(o=me(a),r(o))}t.__value=o,s=!1}),Yt(t)}function me(t){return"__value"in t?t.__value:t.value}const Oe=Symbol("class"),Pe=Symbol("style"),Zt=Symbol("is custom element"),er=Symbol("is html");function Lo(t){if(Y){var e=!1,r=()=>{if(!e){if(e=!0,t.hasAttribute("value")){var s=t.value;Ue(t,"value",null),t.value=s}if(t.hasAttribute("checked")){var o=t.checked;Ue(t,"checked",null),t.checked=o}}};t.__on_r=r,ns(r),ls()}}function jo(t,e){var r=lt(t);r.value===(r.value=e??void 0)||t.value===e&&(e!==0||t.nodeName!=="PROGRESS")||(t.value=e??"")}function ws(t,e){e?t.hasAttribute("selected")||t.setAttribute("selected",""):t.removeAttribute("selected")}function Ue(t,e,r,s){var o=lt(t);Y&&(o[e]=t.getAttribute(e),e==="src"||e==="srcset"||e==="href"&&t.nodeName==="LINK")||o[e]!==(o[e]=r)&&(e==="loading"&&(t[Jr]=r),r==null?t.removeAttribute(e):typeof r!="string"&&tr(t).includes(e)?t[e]=r:t.setAttribute(e,r))}function Is(t,e,r,s,o=!1){var a=lt(t),n=a[Zt],l=!a[er];let i=Y&&n;i&&Ot(!1);var c=e||{},p=t.tagName==="OPTION";for(var R in e)R in r||(r[R]=null);r.class?r.class=As(r.class):r[Oe]&&(r.class=null),r[Pe]&&(r.style??=null);var I=tr(t);for(const E in r){let v=r[E];if(p&&E==="value"&&v==null){t.value=t.__value="",c[E]=v;continue}if(E==="class"){var T=t.namespaceURI==="http://www.w3.org/1999/xhtml";Ee(t,T,v,s,e?.[Oe],r[Oe]),c[E]=v,c[Oe]=r[Oe];continue}if(E==="style"){ys(t,v,e?.[Pe],r[Pe]),c[E]=v,c[Pe]=r[Pe];continue}var f=c[E];if(!(v===f&&!(v===void 0&&t.hasAttribute(E)))){c[E]=v;var 
y=E[0]+E[1];if(y!=="$$")if(y==="on"){const U={},L="$$"+E;let B=E.slice(2);var w=is(B);if(es(B)&&(B=B.slice(0,-7),U.capture=!0),!w&&f){if(v!=null)continue;t.removeEventListener(B,c[L],U),c[L]=null}if(v!=null)if(w)t[`__${B}`]=v,rs([B]);else{let Z=function(ce){c[E].call(this,ce)};c[L]=ts(B,t,Z,U)}else w&&(t[`__${B}`]=void 0)}else if(E==="style")Ue(t,E,v);else if(E==="autofocus")ss(t,!!v);else if(!n&&(E==="__value"||E==="value"&&v!=null))t.value=t.__value=v;else if(E==="selected"&&p)ws(t,v);else{var C=E;l||(C=as(C));var D=C==="defaultValue"||C==="defaultChecked";if(v==null&&!n&&!D)if(a[E]=null,C==="value"||C==="checked"){let U=t;const L=e===void 0;if(C==="value"){let B=U.defaultValue;U.removeAttribute(C),U.defaultValue=B,U.value=U.__value=L?B:null}else{let B=U.defaultChecked;U.removeAttribute(C),U.defaultChecked=B,U.checked=L?B:!1}}else t.removeAttribute(E);else D||I.includes(C)&&(n||typeof v!="string")?(t[C]=v,C in a&&(a[C]=os)):typeof v!="function"&&Ue(t,C,v)}}}return i&&Ot(!0),c}function Es(t,e,r=[],s=[],o,a=!1){Mr(r,s,n=>{var l=void 0,i={},c=t.nodeName==="SELECT",p=!1;if(Jt(()=>{var I=e(...n.map(k)),T=Is(t,l,I,o,a);p&&c&&"value"in I&&Ce(t,I.value);for(let y of Object.getOwnPropertySymbols(i))I[y]||Xe(i[y]);for(let y of Object.getOwnPropertySymbols(I)){var f=I[y];y.description===Yr&&(!l||f!==l[y])&&(i[y]&&Xe(i[y]),i[y]=Kt(()=>Vs(t,()=>f))),T[y]=f}l=T}),c){var R=t;ot(()=>{Ce(R,l.value,!0),Yt(R)})}p=!0})}function lt(t){return t.__attributes??={[Zt]:t.nodeName.includes("-"),[er]:t.namespaceURI===Kr}}var At=new Map;function tr(t){var e=At.get(t.nodeName);if(e)return e;At.set(t.nodeName,e=[]);for(var r,s=t,o=Element.prototype;o!==s;){r=Zr(s);for(var a in r)r[a].set&&e.push(a);s=Xr(s)}return e}function ko(t,e){return Tr+Br(t,e)}function rr(t,e){return function(){return t.apply(e,arguments)}}const{toString:gs}=Object.prototype,{getPrototypeOf:it}=Object,{iterator:De,toStringTag:sr}=Symbol,Le=(t=>e=>{const r=gs.call(e);return t[r]||(t[r]=r.slice(8,-1).toLowerCase())})(Object.create(null)),N=t=>(t=t.toLowerCase(),e=>Le(e)===t),je=t=>e=>typeof e===t,{isArray:de}=Array,Ve=je("undefined");function Se(t){return t!==null&&!Ve(t)&&t.constructor!==null&&!Ve(t.constructor)&&_(t.constructor.isBuffer)&&t.constructor.isBuffer(t)}const ar=N("ArrayBuffer");function xs(t){let e;return typeof ArrayBuffer<"u"&&ArrayBuffer.isView?e=ArrayBuffer.isView(t):e=t&&t.buffer&&ar(t.buffer),e}const vs=je("string"),_=je("function"),or=je("number"),Ae=t=>t!==null&&typeof t=="object",Cs=t=>t===!0||t===!1,ge=t=>{if(Le(t)!=="object")return!1;const e=it(t);return(e===null||e===Object.prototype||Object.getPrototypeOf(e)===null)&&!(sr in t)&&!(De in t)},Us=t=>{if(!Ae(t)||Se(t))return!1;try{return Object.keys(t).length===0&&Object.getPrototypeOf(t)===Object.prototype}catch{return!1}},Ts=N("Date"),Bs=N("File"),Ds=N("Blob"),Ls=N("FileList"),js=t=>Ae(t)&&_(t.pipe),ks=t=>{let e;return t&&(typeof FormData=="function"&&t instanceof FormData||_(t.append)&&((e=Le(t))==="formdata"||e==="object"&&_(t.toString)&&t.toString()==="[object FormData]"))},zs=N("URLSearchParams"),[Fs,Gs,_s,qs]=["ReadableStream","Request","Response","Headers"].map(N),Hs=t=>t.trim?t.trim():t.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"");function Re(t,e,{allOwnKeys:r=!1}={}){if(t===null||typeof t>"u")return;let s,o;if(typeof t!="object"&&(t=[t]),de(t))for(s=0,o=t.length;s0;)if(o=r[s],e===o.toLowerCase())return o;return null}const ne=typeof globalThis<"u"?globalThis:typeof self<"u"?self:typeof window<"u"?window:global,lr=t=>!Ve(t)&&t!==ne;function 
Ye(){const{caseless:t}=lr(this)&&this||{},e={},r=(s,o)=>{const a=t&&nr(e,o)||o;ge(e[a])&&ge(s)?e[a]=Ye(e[a],s):ge(s)?e[a]=Ye({},s):de(s)?e[a]=s.slice():e[a]=s};for(let s=0,o=arguments.length;s(Re(e,(o,a)=>{r&&_(o)?t[a]=rr(o,r):t[a]=o},{allOwnKeys:s}),t),Ns=t=>(t.charCodeAt(0)===65279&&(t=t.slice(1)),t),Ws=(t,e,r,s)=>{t.prototype=Object.create(e.prototype,s),t.prototype.constructor=t,Object.defineProperty(t,"super",{value:e.prototype}),r&&Object.assign(t.prototype,r)},Qs=(t,e,r,s)=>{let o,a,n;const l={};if(e=e||{},t==null)return e;do{for(o=Object.getOwnPropertyNames(t),a=o.length;a-- >0;)n=o[a],(!s||s(n,t,e))&&!l[n]&&(e[n]=t[n],l[n]=!0);t=r!==!1&&it(t)}while(t&&(!r||r(t,e))&&t!==Object.prototype);return e},Ms=(t,e,r)=>{t=String(t),(r===void 0||r>t.length)&&(r=t.length),r-=e.length;const s=t.indexOf(e,r);return s!==-1&&s===r},Js=t=>{if(!t)return null;if(de(t))return t;let e=t.length;if(!or(e))return null;const r=new Array(e);for(;e-- >0;)r[e]=t[e];return r},Ks=(t=>e=>t&&e instanceof t)(typeof Uint8Array<"u"&&it(Uint8Array)),Xs=(t,e)=>{const s=(t&&t[De]).call(t);let o;for(;(o=s.next())&&!o.done;){const a=o.value;e.call(t,a[0],a[1])}},Ys=(t,e)=>{let r;const s=[];for(;(r=t.exec(e))!==null;)s.push(r);return s},Zs=N("HTMLFormElement"),ea=t=>t.toLowerCase().replace(/[-_\s]([a-z\d])(\w*)/g,function(r,s,o){return s.toUpperCase()+o}),Rt=(({hasOwnProperty:t})=>(e,r)=>t.call(e,r))(Object.prototype),ta=N("RegExp"),ir=(t,e)=>{const r=Object.getOwnPropertyDescriptors(t),s={};Re(r,(o,a)=>{let n;(n=e(o,a,t))!==!1&&(s[a]=n||o)}),Object.defineProperties(t,s)},ra=t=>{ir(t,(e,r)=>{if(_(t)&&["arguments","caller","callee"].indexOf(r)!==-1)return!1;const s=t[r];if(_(s)){if(e.enumerable=!1,"writable"in e){e.writable=!1;return}e.set||(e.set=()=>{throw Error("Can not rewrite read-only method '"+r+"'")})}})},sa=(t,e)=>{const r={},s=o=>{o.forEach(a=>{r[a]=!0})};return de(t)?s(t):s(String(t).split(e)),r},aa=()=>{},oa=(t,e)=>t!=null&&Number.isFinite(t=+t)?t:e;function na(t){return!!(t&&_(t.append)&&t[sr]==="FormData"&&t[De])}const la=t=>{const e=new Array(10),r=(s,o)=>{if(Ae(s)){if(e.indexOf(s)>=0)return;if(Se(s))return s;if(!("toJSON"in s)){e[o]=s;const a=de(s)?[]:{};return Re(s,(n,l)=>{const i=r(n,o+1);!Ve(i)&&(a[l]=i)}),e[o]=void 0,a}}return s};return r(t,0)},ia=N("AsyncFunction"),ca=t=>t&&(Ae(t)||_(t))&&_(t.then)&&_(t.catch),cr=((t,e)=>t?setImmediate:e?((r,s)=>(ne.addEventListener("message",({source:o,data:a})=>{o===ne&&a===r&&s.length&&s.shift()()},!1),o=>{s.push(o),ne.postMessage(r,"*")}))(`axios@${Math.random()}`,[]):r=>setTimeout(r))(typeof setImmediate=="function",_(ne.postMessage)),pa=typeof queueMicrotask<"u"?queueMicrotask.bind(ne):typeof process<"u"&&process.nextTick||cr,da=t=>t!=null&&_(t[De]),h={isArray:de,isArrayBuffer:ar,isBuffer:Se,isFormData:ks,isArrayBufferView:xs,isString:vs,isNumber:or,isBoolean:Cs,isObject:Ae,isPlainObject:ge,isEmptyObject:Us,isReadableStream:Fs,isRequest:Gs,isResponse:_s,isHeaders:qs,isUndefined:Ve,isDate:Ts,isFile:Bs,isBlob:Ds,isRegExp:ta,isFunction:_,isStream:js,isURLSearchParams:zs,isTypedArray:Ks,isFileList:Ls,forEach:Re,merge:Ye,extend:$s,trim:Hs,stripBOM:Ns,inherits:Ws,toFlatObject:Qs,kindOf:Le,kindOfTest:N,endsWith:Ms,toArray:Js,forEachEntry:Xs,matchAll:Ys,isHTMLForm:Zs,hasOwnProperty:Rt,hasOwnProp:Rt,reduceDescriptors:ir,freezeMethods:ra,toObjectSet:sa,toCamelCase:ea,noop:aa,toFiniteNumber:oa,findKey:nr,global:ne,isContextDefined:lr,isSpecCompliantForm:na,toJSONObject:la,isAsyncFn:ia,isThenable:ca,setImmediate:cr,asap:pa,isIterable:da};function 
g(t,e,r,s,o){Error.call(this),Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):this.stack=new Error().stack,this.message=t,this.name="AxiosError",e&&(this.code=e),r&&(this.config=r),s&&(this.request=s),o&&(this.response=o,this.status=o.status?o.status:null)}h.inherits(g,Error,{toJSON:function(){return{message:this.message,name:this.name,description:this.description,number:this.number,fileName:this.fileName,lineNumber:this.lineNumber,columnNumber:this.columnNumber,stack:this.stack,config:h.toJSONObject(this.config),code:this.code,status:this.status}}});const pr=g.prototype,dr={};["ERR_BAD_OPTION_VALUE","ERR_BAD_OPTION","ECONNABORTED","ETIMEDOUT","ERR_NETWORK","ERR_FR_TOO_MANY_REDIRECTS","ERR_DEPRECATED","ERR_BAD_RESPONSE","ERR_BAD_REQUEST","ERR_CANCELED","ERR_NOT_SUPPORT","ERR_INVALID_URL"].forEach(t=>{dr[t]={value:t}});Object.defineProperties(g,dr);Object.defineProperty(pr,"isAxiosError",{value:!0});g.from=(t,e,r,s,o,a)=>{const n=Object.create(pr);return h.toFlatObject(t,n,function(i){return i!==Error.prototype},l=>l!=="isAxiosError"),g.call(n,t.message,e,r,s,o),n.cause=t,n.name=t.name,a&&Object.assign(n,a),n};const ha=null;function Ze(t){return h.isPlainObject(t)||h.isArray(t)}function hr(t){return h.endsWith(t,"[]")?t.slice(0,-2):t}function ft(t,e,r){return t?t.concat(e).map(function(o,a){return o=hr(o),!r&&a?"["+o+"]":o}).join(r?".":""):e}function ua(t){return h.isArray(t)&&!t.some(Ze)}const Oa=h.toFlatObject(h,{},null,function(e){return/^is[A-Z]/.test(e)});function ke(t,e,r){if(!h.isObject(t))throw new TypeError("target must be an object");e=e||new FormData,r=h.toFlatObject(r,{metaTokens:!0,dots:!1,indexes:!1},!1,function(y,w){return!h.isUndefined(w[y])});const s=r.metaTokens,o=r.visitor||p,a=r.dots,n=r.indexes,i=(r.Blob||typeof Blob<"u"&&Blob)&&h.isSpecCompliantForm(e);if(!h.isFunction(o))throw new TypeError("visitor must be a function");function c(f){if(f===null)return"";if(h.isDate(f))return f.toISOString();if(h.isBoolean(f))return f.toString();if(!i&&h.isBlob(f))throw new g("Blob is not supported. 
Use a Buffer instead.");return h.isArrayBuffer(f)||h.isTypedArray(f)?i&&typeof Blob=="function"?new Blob([f]):Buffer.from(f):f}function p(f,y,w){let C=f;if(f&&!w&&typeof f=="object"){if(h.endsWith(y,"{}"))y=s?y:y.slice(0,-2),f=JSON.stringify(f);else if(h.isArray(f)&&ua(f)||(h.isFileList(f)||h.endsWith(y,"[]"))&&(C=h.toArray(f)))return y=hr(y),C.forEach(function(E,v){!(h.isUndefined(E)||E===null)&&e.append(n===!0?ft([y],v,a):n===null?y:y+"[]",c(E))}),!1}return Ze(f)?!0:(e.append(ft(w,y,a),c(f)),!1)}const R=[],I=Object.assign(Oa,{defaultVisitor:p,convertValue:c,isVisitable:Ze});function T(f,y){if(!h.isUndefined(f)){if(R.indexOf(f)!==-1)throw Error("Circular reference detected in "+y.join("."));R.push(f),h.forEach(f,function(C,D){(!(h.isUndefined(C)||C===null)&&o.call(e,C,h.isString(D)?D.trim():D,y,I))===!0&&T(C,y?y.concat(D):[D])}),R.pop()}}if(!h.isObject(t))throw new TypeError("data must be an object");return T(t),e}function yt(t){const e={"!":"%21","'":"%27","(":"%28",")":"%29","~":"%7E","%20":"+","%00":"\0"};return encodeURIComponent(t).replace(/[!'()~]|%20|%00/g,function(s){return e[s]})}function ct(t,e){this._pairs=[],t&&ke(t,this,e)}const ur=ct.prototype;ur.append=function(e,r){this._pairs.push([e,r])};ur.toString=function(e){const r=e?function(s){return e.call(this,s,yt)}:yt;return this._pairs.map(function(o){return r(o[0])+"="+r(o[1])},"").join("&")};function Pa(t){return encodeURIComponent(t).replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%20/g,"+").replace(/%5B/gi,"[").replace(/%5D/gi,"]")}function Or(t,e,r){if(!e)return t;const s=r&&r.encode||Pa;h.isFunction(r)&&(r={serialize:r});const o=r&&r.serialize;let a;if(o?a=o(e,r):a=h.isURLSearchParams(e)?e.toString():new ct(e,r).toString(s),a){const n=t.indexOf("#");n!==-1&&(t=t.slice(0,n)),t+=(t.indexOf("?")===-1?"?":"&")+a}return t}class wt{constructor(){this.handlers=[]}use(e,r,s){return this.handlers.push({fulfilled:e,rejected:r,synchronous:s?s.synchronous:!1,runWhen:s?s.runWhen:null}),this.handlers.length-1}eject(e){this.handlers[e]&&(this.handlers[e]=null)}clear(){this.handlers&&(this.handlers=[])}forEach(e){h.forEach(this.handlers,function(s){s!==null&&e(s)})}}const Pr={silentJSONParsing:!0,forcedJSONParsing:!0,clarifyTimeoutError:!1},ba=typeof URLSearchParams<"u"?URLSearchParams:ct,ma=typeof FormData<"u"?FormData:null,Va=typeof Blob<"u"?Blob:null,Sa={isBrowser:!0,classes:{URLSearchParams:ba,FormData:ma,Blob:Va},protocols:["http","https","file","blob","url","data"]},pt=typeof window<"u"&&typeof document<"u",et=typeof navigator=="object"&&navigator||void 0,Aa=pt&&(!et||["ReactNative","NativeScript","NS"].indexOf(et.product)<0),Ra=typeof WorkerGlobalScope<"u"&&self instanceof WorkerGlobalScope&&typeof self.importScripts=="function",fa=pt&&window.location.href||"http://localhost",ya=Object.freeze(Object.defineProperty({__proto__:null,hasBrowserEnv:pt,hasStandardBrowserEnv:Aa,hasStandardBrowserWebWorkerEnv:Ra,navigator:et,origin:fa},Symbol.toStringTag,{value:"Module"})),G={...ya,...Sa};function wa(t,e){return ke(t,new G.classes.URLSearchParams,{visitor:function(r,s,o,a){return G.isNode&&h.isBuffer(r)?(this.append(s,r.toString("base64")),!1):a.defaultVisitor.apply(this,arguments)},...e})}function Ia(t){return h.matchAll(/\w+|\[(\w*)]/g,t).map(e=>e[0]==="[]"?"":e[1]||e[0])}function Ea(t){const e={},r=Object.keys(t);let s;const o=r.length;let a;for(s=0;s<o;s++)a=r[s],e[a]=t[a];return e}function br(t){function e(r,s,o,a){let n=r[a++];if(n==="__proto__")return!0;const l=Number.isFinite(+n),i=a>=r.length;return
n=!n&&h.isArray(o)?o.length:n,i?(h.hasOwnProp(o,n)?o[n]=[o[n],s]:o[n]=s,!l):((!o[n]||!h.isObject(o[n]))&&(o[n]=[]),e(r,s,o[n],a)&&h.isArray(o[n])&&(o[n]=Ea(o[n])),!l)}if(h.isFormData(t)&&h.isFunction(t.entries)){const r={};return h.forEachEntry(t,(s,o)=>{e(Ia(s),o,r,0)}),r}return null}function ga(t,e,r){if(h.isString(t))try{return(e||JSON.parse)(t),h.trim(t)}catch(s){if(s.name!=="SyntaxError")throw s}return(r||JSON.stringify)(t)}const fe={transitional:Pr,adapter:["xhr","http","fetch"],transformRequest:[function(e,r){const s=r.getContentType()||"",o=s.indexOf("application/json")>-1,a=h.isObject(e);if(a&&h.isHTMLForm(e)&&(e=new FormData(e)),h.isFormData(e))return o?JSON.stringify(br(e)):e;if(h.isArrayBuffer(e)||h.isBuffer(e)||h.isStream(e)||h.isFile(e)||h.isBlob(e)||h.isReadableStream(e))return e;if(h.isArrayBufferView(e))return e.buffer;if(h.isURLSearchParams(e))return r.setContentType("application/x-www-form-urlencoded;charset=utf-8",!1),e.toString();let l;if(a){if(s.indexOf("application/x-www-form-urlencoded")>-1)return wa(e,this.formSerializer).toString();if((l=h.isFileList(e))||s.indexOf("multipart/form-data")>-1){const i=this.env&&this.env.FormData;return ke(l?{"files[]":e}:e,i&&new i,this.formSerializer)}}return a||o?(r.setContentType("application/json",!1),ga(e)):e}],transformResponse:[function(e){const r=this.transitional||fe.transitional,s=r&&r.forcedJSONParsing,o=this.responseType==="json";if(h.isResponse(e)||h.isReadableStream(e))return e;if(e&&h.isString(e)&&(s&&!this.responseType||o)){const n=!(r&&r.silentJSONParsing)&&o;try{return JSON.parse(e)}catch(l){if(n)throw l.name==="SyntaxError"?g.from(l,g.ERR_BAD_RESPONSE,this,null,this.response):l}}return e}],timeout:0,xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN",maxContentLength:-1,maxBodyLength:-1,env:{FormData:G.classes.FormData,Blob:G.classes.Blob},validateStatus:function(e){return e>=200&&e<300},headers:{common:{Accept:"application/json, text/plain, */*","Content-Type":void 0}}};h.forEach(["delete","get","head","post","put","patch"],t=>{fe.headers[t]={}});const xa=h.toObjectSet(["age","authorization","content-length","content-type","etag","expires","from","host","if-modified-since","if-unmodified-since","last-modified","location","max-forwards","proxy-authorization","referer","retry-after","user-agent"]),va=t=>{const e={};let r,s,o;return t&&t.split(` -`).forEach(function(n){o=n.indexOf(":"),r=n.substring(0,o).trim().toLowerCase(),s=n.substring(o+1).trim(),!(!r||e[r]&&xa[r])&&(r==="set-cookie"?e[r]?e[r].push(s):e[r]=[s]:e[r]=e[r]?e[r]+", "+s:s)}),e},It=Symbol("internals");function be(t){return t&&String(t).trim().toLowerCase()}function xe(t){return t===!1||t==null?t:h.isArray(t)?t.map(xe):String(t)}function Ca(t){const e=Object.create(null),r=/([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;let s;for(;s=r.exec(t);)e[s[1]]=s[2];return e}const Ua=t=>/^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(t.trim());function Qe(t,e,r,s,o){if(h.isFunction(s))return s.call(this,e,r);if(o&&(e=r),!!h.isString(e)){if(h.isString(s))return e.indexOf(s)!==-1;if(h.isRegExp(s))return s.test(e)}}function Ta(t){return t.trim().toLowerCase().replace(/([a-z\d])(\w*)/g,(e,r,s)=>r.toUpperCase()+s)}function Ba(t,e){const r=h.toCamelCase(" "+e);["get","set","has"].forEach(s=>{Object.defineProperty(t,s+r,{value:function(o,a,n){return this[s].call(this,e,o,a,n)},configurable:!0})})}let q=class{constructor(e){e&&this.set(e)}set(e,r,s){const o=this;function a(l,i,c){const p=be(i);if(!p)throw new Error("header name must be a non-empty string");const 
R=h.findKey(o,p);(!R||o[R]===void 0||c===!0||c===void 0&&o[R]!==!1)&&(o[R||i]=xe(l))}const n=(l,i)=>h.forEach(l,(c,p)=>a(c,p,i));if(h.isPlainObject(e)||e instanceof this.constructor)n(e,r);else if(h.isString(e)&&(e=e.trim())&&!Ua(e))n(va(e),r);else if(h.isObject(e)&&h.isIterable(e)){let l={},i,c;for(const p of e){if(!h.isArray(p))throw TypeError("Object iterator must return a key-value pair");l[c=p[0]]=(i=l[c])?h.isArray(i)?[...i,p[1]]:[i,p[1]]:p[1]}n(l,r)}else e!=null&&a(r,e,s);return this}get(e,r){if(e=be(e),e){const s=h.findKey(this,e);if(s){const o=this[s];if(!r)return o;if(r===!0)return Ca(o);if(h.isFunction(r))return r.call(this,o,s);if(h.isRegExp(r))return r.exec(o);throw new TypeError("parser must be boolean|regexp|function")}}}has(e,r){if(e=be(e),e){const s=h.findKey(this,e);return!!(s&&this[s]!==void 0&&(!r||Qe(this,this[s],s,r)))}return!1}delete(e,r){const s=this;let o=!1;function a(n){if(n=be(n),n){const l=h.findKey(s,n);l&&(!r||Qe(s,s[l],l,r))&&(delete s[l],o=!0)}}return h.isArray(e)?e.forEach(a):a(e),o}clear(e){const r=Object.keys(this);let s=r.length,o=!1;for(;s--;){const a=r[s];(!e||Qe(this,this[a],a,e,!0))&&(delete this[a],o=!0)}return o}normalize(e){const r=this,s={};return h.forEach(this,(o,a)=>{const n=h.findKey(s,a);if(n){r[n]=xe(o),delete r[a];return}const l=e?Ta(a):String(a).trim();l!==a&&delete r[a],r[l]=xe(o),s[l]=!0}),this}concat(...e){return this.constructor.concat(this,...e)}toJSON(e){const r=Object.create(null);return h.forEach(this,(s,o)=>{s!=null&&s!==!1&&(r[o]=e&&h.isArray(s)?s.join(", "):s)}),r}[Symbol.iterator](){return Object.entries(this.toJSON())[Symbol.iterator]()}toString(){return Object.entries(this.toJSON()).map(([e,r])=>e+": "+r).join(`
-`)}getSetCookie(){return this.get("set-cookie")||[]}get[Symbol.toStringTag](){return"AxiosHeaders"}static from(e){return e instanceof this?e:new this(e)}static concat(e,...r){const s=new this(e);return r.forEach(o=>s.set(o)),s}static accessor(e){const s=(this[It]=this[It]={accessors:{}}).accessors,o=this.prototype;function a(n){const l=be(n);s[l]||(Ba(o,n),s[l]=!0)}return h.isArray(e)?e.forEach(a):a(e),this}};q.accessor(["Content-Type","Content-Length","Accept","Accept-Encoding","User-Agent","Authorization"]);h.reduceDescriptors(q.prototype,({value:t},e)=>{let r=e[0].toUpperCase()+e.slice(1);return{get:()=>t,set(s){this[r]=s}}});h.freezeMethods(q);function Me(t,e){const r=this||fe,s=e||r,o=q.from(s.headers);let a=s.data;return h.forEach(t,function(l){a=l.call(r,a,o.normalize(),e?e.status:void 0)}),o.normalize(),a}function mr(t){return!!(t&&t.__CANCEL__)}function he(t,e,r){g.call(this,t??"canceled",g.ERR_CANCELED,e,r),this.name="CanceledError"}h.inherits(he,g,{__CANCEL__:!0});function Vr(t,e,r){const s=r.config.validateStatus;!r.status||!s||s(r.status)?t(r):e(new g("Request failed with status code "+r.status,[g.ERR_BAD_REQUEST,g.ERR_BAD_RESPONSE][Math.floor(r.status/100)-4],r.config,r.request,r))}function Da(t){const e=/^([-+\w]{1,25})(:?\/\/|:)/.exec(t);return e&&e[1]||""}function La(t,e){t=t||10;const r=new Array(t),s=new Array(t);let o=0,a=0,n;return e=e!==void 0?e:1e3,function(i){const c=Date.now(),p=s[a];n||(n=c),r[o]=i,s[o]=c;let R=a,I=0;for(;R!==o;)I+=r[R++],R=R%t;if(o=(o+1)%t,o===a&&(a=(a+1)%t),c-n<e)return;const E=p&&c-p;return E?Math.round(I*1e3/E):void 0}}function ja(t,e){let r=0,s=1e3/e,o,a;const n=(c,p=Date.now())=>{r=p,o=null,a&&(clearTimeout(a),a=null),t(...c)};return[(...c)=>{const p=Date.now(),R=p-r;R>=s?n(c,p):(o=c,a||(a=setTimeout(()=>{a=null,n(o)},s-R)))},()=>o&&n(o)]}const Te=(t,e,r=3)=>{let s=0;const o=La(50,250);return ja(a=>{const n=a.loaded,l=a.lengthComputable?a.total:void 0,i=n-s,c=o(i),p=n<=l;s=n;const
R={loaded:n,total:l,progress:l?n/l:void 0,bytes:i,rate:c||void 0,estimated:c&&l&&p?(l-n)/c:void 0,event:a,lengthComputable:l!=null,[e?"download":"upload"]:!0};t(R)},r)},Et=(t,e)=>{const r=t!=null;return[s=>e[0]({lengthComputable:r,total:t,loaded:s}),e[1]]},gt=t=>(...e)=>h.asap(()=>t(...e)),ka=G.hasStandardBrowserEnv?((t,e)=>r=>(r=new URL(r,G.origin),t.protocol===r.protocol&&t.host===r.host&&(e||t.port===r.port)))(new URL(G.origin),G.navigator&&/(msie|trident)/i.test(G.navigator.userAgent)):()=>!0,za=G.hasStandardBrowserEnv?{write(t,e,r,s,o,a){const n=[t+"="+encodeURIComponent(e)];h.isNumber(r)&&n.push("expires="+new Date(r).toGMTString()),h.isString(s)&&n.push("path="+s),h.isString(o)&&n.push("domain="+o),a===!0&&n.push("secure"),document.cookie=n.join("; ")},read(t){const e=document.cookie.match(new RegExp("(^|;\\s*)("+t+")=([^;]*)"));return e?decodeURIComponent(e[3]):null},remove(t){this.write(t,"",Date.now()-864e5)}}:{write(){},read(){return null},remove(){}};function Fa(t){return/^([a-z][a-z\d+\-.]*:)?\/\//i.test(t)}function Ga(t,e){return e?t.replace(/\/?\/$/,"")+"/"+e.replace(/^\/+/,""):t}function Sr(t,e,r){let s=!Fa(e);return t&&(s||r==!1)?Ga(t,e):e}const xt=t=>t instanceof q?{...t}:t;function ie(t,e){e=e||{};const r={};function s(c,p,R,I){return h.isPlainObject(c)&&h.isPlainObject(p)?h.merge.call({caseless:I},c,p):h.isPlainObject(p)?h.merge({},p):h.isArray(p)?p.slice():p}function o(c,p,R,I){if(h.isUndefined(p)){if(!h.isUndefined(c))return s(void 0,c,R,I)}else return s(c,p,R,I)}function a(c,p){if(!h.isUndefined(p))return s(void 0,p)}function n(c,p){if(h.isUndefined(p)){if(!h.isUndefined(c))return s(void 0,c)}else return s(void 0,p)}function l(c,p,R){if(R in e)return s(c,p);if(R in t)return s(void 0,c)}const i={url:a,method:a,data:a,baseURL:n,transformRequest:n,transformResponse:n,paramsSerializer:n,timeout:n,timeoutMessage:n,withCredentials:n,withXSRFToken:n,adapter:n,responseType:n,xsrfCookieName:n,xsrfHeaderName:n,onUploadProgress:n,onDownloadProgress:n,decompress:n,maxContentLength:n,maxBodyLength:n,beforeRedirect:n,transport:n,httpAgent:n,httpsAgent:n,cancelToken:n,socketPath:n,responseEncoding:n,validateStatus:l,headers:(c,p,R)=>o(xt(c),xt(p),R,!0)};return h.forEach(Object.keys({...t,...e}),function(p){const R=i[p]||o,I=R(t[p],e[p],p);h.isUndefined(I)&&R!==l||(r[p]=I)}),r}const Ar=t=>{const e=ie({},t);let{data:r,withXSRFToken:s,xsrfHeaderName:o,xsrfCookieName:a,headers:n,auth:l}=e;e.headers=n=q.from(n),e.url=Or(Sr(e.baseURL,e.url,e.allowAbsoluteUrls),t.params,t.paramsSerializer),l&&n.set("Authorization","Basic "+btoa((l.username||"")+":"+(l.password?unescape(encodeURIComponent(l.password)):"")));let i;if(h.isFormData(r)){if(G.hasStandardBrowserEnv||G.hasStandardBrowserWebWorkerEnv)n.setContentType(void 0);else if((i=n.getContentType())!==!1){const[c,...p]=i?i.split(";").map(R=>R.trim()).filter(Boolean):[];n.setContentType([c||"multipart/form-data",...p].join("; "))}}if(G.hasStandardBrowserEnv&&(s&&h.isFunction(s)&&(s=s(e)),s||s!==!1&&ka(e.url))){const c=o&&a&&za.read(a);c&&n.set(o,c)}return e},_a=typeof XMLHttpRequest<"u",qa=_a&&function(t){return new Promise(function(r,s){const o=Ar(t);let a=o.data;const n=q.from(o.headers).normalize();let{responseType:l,onUploadProgress:i,onDownloadProgress:c}=o,p,R,I,T,f;function y(){T&&T(),f&&f(),o.cancelToken&&o.cancelToken.unsubscribe(p),o.signal&&o.signal.removeEventListener("abort",p)}let w=new XMLHttpRequest;w.open(o.method.toUpperCase(),o.url,!0),w.timeout=o.timeout;function C(){if(!w)return;const E=q.from("getAllResponseHeaders"in 
w&&w.getAllResponseHeaders()),U={data:!l||l==="text"||l==="json"?w.responseText:w.response,status:w.status,statusText:w.statusText,headers:E,config:t,request:w};Vr(function(B){r(B),y()},function(B){s(B),y()},U),w=null}"onloadend"in w?w.onloadend=C:w.onreadystatechange=function(){!w||w.readyState!==4||w.status===0&&!(w.responseURL&&w.responseURL.indexOf("file:")===0)||setTimeout(C)},w.onabort=function(){w&&(s(new g("Request aborted",g.ECONNABORTED,t,w)),w=null)},w.onerror=function(){s(new g("Network Error",g.ERR_NETWORK,t,w)),w=null},w.ontimeout=function(){let v=o.timeout?"timeout of "+o.timeout+"ms exceeded":"timeout exceeded";const U=o.transitional||Pr;o.timeoutErrorMessage&&(v=o.timeoutErrorMessage),s(new g(v,U.clarifyTimeoutError?g.ETIMEDOUT:g.ECONNABORTED,t,w)),w=null},a===void 0&&n.setContentType(null),"setRequestHeader"in w&&h.forEach(n.toJSON(),function(v,U){w.setRequestHeader(U,v)}),h.isUndefined(o.withCredentials)||(w.withCredentials=!!o.withCredentials),l&&l!=="json"&&(w.responseType=o.responseType),c&&([I,f]=Te(c,!0),w.addEventListener("progress",I)),i&&w.upload&&([R,T]=Te(i),w.upload.addEventListener("progress",R),w.upload.addEventListener("loadend",T)),(o.cancelToken||o.signal)&&(p=E=>{w&&(s(!E||E.type?new he(null,t,w):E),w.abort(),w=null)},o.cancelToken&&o.cancelToken.subscribe(p),o.signal&&(o.signal.aborted?p():o.signal.addEventListener("abort",p)));const D=Da(o.url);if(D&&G.protocols.indexOf(D)===-1){s(new g("Unsupported protocol "+D+":",g.ERR_BAD_REQUEST,t));return}w.send(a||null)})},Ha=(t,e)=>{const{length:r}=t=t?t.filter(Boolean):[];if(e||r){let s=new AbortController,o;const a=function(c){if(!o){o=!0,l();const p=c instanceof Error?c:this.reason;s.abort(p instanceof g?p:new he(p instanceof Error?p.message:p))}};let n=e&&setTimeout(()=>{n=null,a(new g(`timeout ${e} of ms exceeded`,g.ETIMEDOUT))},e);const l=()=>{t&&(n&&clearTimeout(n),n=null,t.forEach(c=>{c.unsubscribe?c.unsubscribe(a):c.removeEventListener("abort",a)}),t=null)};t.forEach(c=>c.addEventListener("abort",a));const{signal:i}=s;return i.unsubscribe=()=>h.asap(l),i}},$a=function*(t,e){let r=t.byteLength;if(r<e||!e){yield t;return}let s=0,o;for(;s<r;)o=s+e,yield t.slice(s,o),s=o},Na=async function*(t,e){for await(const r of Wa(t))yield*$a(r,e)},Wa=async function*(t){if(t[Symbol.asyncIterator]){yield*t;return}const e=t.getReader();try{for(;;){const{done:r,value:s}=await e.read();if(r)break;yield s}}finally{await e.cancel()}},vt=(t,e,r,s)=>{const o=Na(t,e);let a=0,n,l=i=>{n||(n=!0,s&&s(i))};return new ReadableStream({async pull(i){try{const{done:c,value:p}=await o.next();if(c){l(),i.close();return}let R=p.byteLength;if(r){let I=a+=R;r(I)}i.enqueue(new Uint8Array(p))}catch(c){throw l(c),c}},cancel(i){return l(i),o.return()}},{highWaterMark:2})},ze=typeof fetch=="function"&&typeof Request=="function"&&typeof Response=="function",Rr=ze&&typeof ReadableStream=="function",Qa=ze&&(typeof TextEncoder=="function"?(t=>e=>t.encode(e))(new TextEncoder):async t=>new Uint8Array(await new Response(t).arrayBuffer())),fr=(t,...e)=>{try{return!!t(...e)}catch{return!1}},Ma=Rr&&fr(()=>{let t=!1;const e=new Request(G.origin,{body:new ReadableStream,method:"POST",get duplex(){return t=!0,"half"}}).headers.has("Content-Type");return t&&!e}),Ct=64*1024,tt=Rr&&fr(()=>h.isReadableStream(new Response("").body)),Be={stream:tt&&(t=>t.body)};ze&&(t=>{["text","arrayBuffer","blob","formData","stream"].forEach(e=>{!Be[e]&&(Be[e]=h.isFunction(t[e])?r=>r[e]():(r,s)=>{throw new g(`Response type '${e}' is not supported`,g.ERR_NOT_SUPPORT,s)})})})(new Response);const Ja=async t=>{if(t==null)return 0;if(h.isBlob(t))return t.size;if(h.isSpecCompliantForm(t))return(await new Request(G.origin,{method:"POST",body:t}).arrayBuffer()).byteLength;if(h.isArrayBufferView(t)||h.isArrayBuffer(t))return t.byteLength;if(h.isURLSearchParams(t)&&(t=t+""),h.isString(t))return(await
Qa(t)).byteLength},Ka=async(t,e)=>{const r=h.toFiniteNumber(t.getContentLength());return r??Ja(e)},Xa=ze&&(async t=>{let{url:e,method:r,data:s,signal:o,cancelToken:a,timeout:n,onDownloadProgress:l,onUploadProgress:i,responseType:c,headers:p,withCredentials:R="same-origin",fetchOptions:I}=Ar(t);c=c?(c+"").toLowerCase():"text";let T=Ha([o,a&&a.toAbortSignal()],n),f;const y=T&&T.unsubscribe&&(()=>{T.unsubscribe()});let w;try{if(i&&Ma&&r!=="get"&&r!=="head"&&(w=await Ka(p,s))!==0){let U=new Request(e,{method:"POST",body:s,duplex:"half"}),L;if(h.isFormData(s)&&(L=U.headers.get("content-type"))&&p.setContentType(L),U.body){const[B,Z]=Et(w,Te(gt(i)));s=vt(U.body,Ct,B,Z)}}h.isString(R)||(R=R?"include":"omit");const C="credentials"in Request.prototype;f=new Request(e,{...I,signal:T,method:r.toUpperCase(),headers:p.normalize().toJSON(),body:s,duplex:"half",credentials:C?R:void 0});let D=await fetch(f,I);const E=tt&&(c==="stream"||c==="response");if(tt&&(l||E&&y)){const U={};["status","statusText","headers"].forEach(ce=>{U[ce]=D[ce]});const L=h.toFiniteNumber(D.headers.get("content-length")),[B,Z]=l&&Et(L,Te(gt(l),!0))||[];D=new Response(vt(D.body,Ct,B,()=>{Z&&Z(),y&&y()}),U)}c=c||"text";let v=await Be[h.findKey(Be,c)||"text"](D,t);return!E&&y&&y(),await new Promise((U,L)=>{Vr(U,L,{data:v,headers:q.from(D.headers),status:D.status,statusText:D.statusText,config:t,request:f})})}catch(C){throw y&&y(),C&&C.name==="TypeError"&&/Load failed|fetch/i.test(C.message)?Object.assign(new g("Network Error",g.ERR_NETWORK,t,f),{cause:C.cause||C}):g.from(C,C&&C.code,t,f)}}),rt={http:ha,xhr:qa,fetch:Xa};h.forEach(rt,(t,e)=>{if(t){try{Object.defineProperty(t,"name",{value:e})}catch{}Object.defineProperty(t,"adapterName",{value:e})}});const Ut=t=>`- ${t}`,Ya=t=>h.isFunction(t)||t===null||t===!1,yr={getAdapter:t=>{t=h.isArray(t)?t:[t];const{length:e}=t;let r,s;const o={};for(let a=0;a<e;a++){r=t[a];let l;if(s=r,!Ya(r)&&(s=rt[(l=String(r)).toLowerCase()],s===void 0))throw new g(`Unknown adapter '${l}'`);if(s)break;o[l||"#"+a]=s}if(!s){const a=Object.entries(o).map(([l,i])=>`adapter ${l} `+(i===!1?"is not supported by the environment":"is not available in the build"));let n=e?a.length>1?`since :
-`+a.map(Ut).join(`
-`):" "+Ut(a[0]):"as no adapter specified";throw new g("There is no suitable adapter to dispatch the request "+n,"ERR_NOT_SUPPORT")}return s},adapters:rt};function Je(t){if(t.cancelToken&&t.cancelToken.throwIfRequested(),t.signal&&t.signal.aborted)throw new he(null,t)}function Tt(t){return Je(t),t.headers=q.from(t.headers),t.data=Me.call(t,t.transformRequest),["post","put","patch"].indexOf(t.method)!==-1&&t.headers.setContentType("application/x-www-form-urlencoded",!1),yr.getAdapter(t.adapter||fe.adapter)(t).then(function(s){return Je(t),s.data=Me.call(t,t.transformResponse,s),s.headers=q.from(s.headers),s},function(s){return mr(s)||(Je(t),s&&s.response&&(s.response.data=Me.call(t,t.transformResponse,s.response),s.response.headers=q.from(s.response.headers))),Promise.reject(s)})}const wr="1.11.0",Fe={};["object","boolean","number","function","string","symbol"].forEach((t,e)=>{Fe[t]=function(s){return typeof s===t||"a"+(e<1?"n ":" ")+t}});const Bt={};Fe.transitional=function(e,r,s){function o(a,n){return"[Axios v"+wr+"] Transitional option '"+a+"'"+n+(s?". "+s:"")}return(a,n,l)=>{if(e===!1)throw new g(o(n," has been removed"+(r?" 
in "+r:"")),g.ERR_DEPRECATED);return r&&!Bt[n]&&(Bt[n]=!0,console.warn(o(n," has been deprecated since v"+r+" and will be removed in the near future"))),e?e(a,n,l):!0}};Fe.spelling=function(e){return(r,s)=>(console.warn(`${s} is likely a misspelling of ${e}`),!0)};function Za(t,e,r){if(typeof t!="object")throw new g("options must be an object",g.ERR_BAD_OPTION_VALUE);const s=Object.keys(t);let o=s.length;for(;o-- >0;){const a=s[o],n=e[a];if(n){const l=t[a],i=l===void 0||n(l,a,t);if(i!==!0)throw new g("option "+a+" must be "+i,g.ERR_BAD_OPTION_VALUE);continue}if(r!==!0)throw new g("Unknown option "+a,g.ERR_BAD_OPTION)}}const ve={assertOptions:Za,validators:Fe},M=ve.validators;let le=class{constructor(e){this.defaults=e||{},this.interceptors={request:new wt,response:new wt}}async request(e,r){try{return await this._request(e,r)}catch(s){if(s instanceof Error){let o={};Error.captureStackTrace?Error.captureStackTrace(o):o=new Error;const a=o.stack?o.stack.replace(/^.+\n/,""):"";try{s.stack?a&&!String(s.stack).endsWith(a.replace(/^.+\n.+\n/,""))&&(s.stack+=` -`+a):s.stack=a}catch{}}throw s}}_request(e,r){typeof e=="string"?(r=r||{},r.url=e):r=e||{},r=ie(this.defaults,r);const{transitional:s,paramsSerializer:o,headers:a}=r;s!==void 0&&ve.assertOptions(s,{silentJSONParsing:M.transitional(M.boolean),forcedJSONParsing:M.transitional(M.boolean),clarifyTimeoutError:M.transitional(M.boolean)},!1),o!=null&&(h.isFunction(o)?r.paramsSerializer={serialize:o}:ve.assertOptions(o,{encode:M.function,serialize:M.function},!0)),r.allowAbsoluteUrls!==void 0||(this.defaults.allowAbsoluteUrls!==void 0?r.allowAbsoluteUrls=this.defaults.allowAbsoluteUrls:r.allowAbsoluteUrls=!0),ve.assertOptions(r,{baseUrl:M.spelling("baseURL"),withXsrfToken:M.spelling("withXSRFToken")},!0),r.method=(r.method||this.defaults.method||"get").toLowerCase();let n=a&&h.merge(a.common,a[r.method]);a&&h.forEach(["delete","get","head","post","put","patch","common"],f=>{delete a[f]}),r.headers=q.concat(n,a);const l=[];let i=!0;this.interceptors.request.forEach(function(y){typeof y.runWhen=="function"&&y.runWhen(r)===!1||(i=i&&y.synchronous,l.unshift(y.fulfilled,y.rejected))});const c=[];this.interceptors.response.forEach(function(y){c.push(y.fulfilled,y.rejected)});let p,R=0,I;if(!i){const f=[Tt.bind(this),void 0];for(f.unshift(...l),f.push(...c),I=f.length,p=Promise.resolve(r);R{if(!s._listeners)return;let a=s._listeners.length;for(;a-- >0;)s._listeners[a](o);s._listeners=null}),this.promise.then=o=>{let a;const n=new Promise(l=>{s.subscribe(l),a=l}).then(o);return n.cancel=function(){s.unsubscribe(a)},n},e(function(a,n,l){s.reason||(s.reason=new he(a,n,l),r(s.reason))})}throwIfRequested(){if(this.reason)throw this.reason}subscribe(e){if(this.reason){e(this.reason);return}this._listeners?this._listeners.push(e):this._listeners=[e]}unsubscribe(e){if(!this._listeners)return;const r=this._listeners.indexOf(e);r!==-1&&this._listeners.splice(r,1)}toAbortSignal(){const e=new AbortController,r=s=>{e.abort(s)};return this.subscribe(r),e.signal.unsubscribe=()=>this.unsubscribe(r),e.signal}static source(){let e;return{token:new Ir(function(o){e=o}),cancel:e}}};function to(t){return function(r){return t.apply(null,r)}}function ro(t){return h.isObject(t)&&t.isAxiosError===!0}const 
st={Continue:100,SwitchingProtocols:101,Processing:102,EarlyHints:103,Ok:200,Created:201,Accepted:202,NonAuthoritativeInformation:203,NoContent:204,ResetContent:205,PartialContent:206,MultiStatus:207,AlreadyReported:208,ImUsed:226,MultipleChoices:300,MovedPermanently:301,Found:302,SeeOther:303,NotModified:304,UseProxy:305,Unused:306,TemporaryRedirect:307,PermanentRedirect:308,BadRequest:400,Unauthorized:401,PaymentRequired:402,Forbidden:403,NotFound:404,MethodNotAllowed:405,NotAcceptable:406,ProxyAuthenticationRequired:407,RequestTimeout:408,Conflict:409,Gone:410,LengthRequired:411,PreconditionFailed:412,PayloadTooLarge:413,UriTooLong:414,UnsupportedMediaType:415,RangeNotSatisfiable:416,ExpectationFailed:417,ImATeapot:418,MisdirectedRequest:421,UnprocessableEntity:422,Locked:423,FailedDependency:424,TooEarly:425,UpgradeRequired:426,PreconditionRequired:428,TooManyRequests:429,RequestHeaderFieldsTooLarge:431,UnavailableForLegalReasons:451,InternalServerError:500,NotImplemented:501,BadGateway:502,ServiceUnavailable:503,GatewayTimeout:504,HttpVersionNotSupported:505,VariantAlsoNegotiates:506,InsufficientStorage:507,LoopDetected:508,NotExtended:510,NetworkAuthenticationRequired:511};Object.entries(st).forEach(([t,e])=>{st[e]=t});function Er(t){const e=new le(t),r=rr(le.prototype.request,e);return h.extend(r,le.prototype,e,{allOwnKeys:!0}),h.extend(r,e,null,{allOwnKeys:!0}),r.create=function(o){return Er(ie(t,o))},r}const u=Er(fe);u.Axios=le;u.CanceledError=he;u.CancelToken=eo;u.isCancel=mr;u.VERSION=wr;u.toFormData=ke;u.AxiosError=g;u.Cancel=u.CanceledError;u.all=function(e){return Promise.all(e)};u.spread=to;u.isAxiosError=ro;u.mergeConfig=ie;u.AxiosHeaders=q;u.formToJSON=t=>br(h.isHTMLForm(t)?new FormData(t):t);u.getAdapter=yr.getAdapter;u.HttpStatusCode=st;u.default=u;const{Axios:Go,AxiosError:_o,CanceledError:qo,isCancel:Ho,CancelToken:$o,VERSION:No,all:Wo,Cancel:Qo,isAxiosError:Mo,spread:Jo,toFormData:Ko,AxiosHeaders:Xo,HttpStatusCode:Yo,formToJSON:Zo,getAdapter:en,mergeConfig:tn}=u,O="/api/v1".replace(/\/+$/,"");class H{constructor(e,r=O,s=u){this.basePath=r,this.axios=s,e&&(this.configuration=e,this.basePath=e.basePath??r)}configuration}class so extends Error{constructor(e,r){super(r),this.field=e,this.name="RequiredError"}}const P={},b="https://example.com",d=function(t,e,r){if(r==null)throw new so(e,`Required parameter ${e} was null or undefined when calling ${t}.`)},m=async function(t,e,r){if(r&&r.apiKey){const s=typeof r.apiKey=="function"?await r.apiKey(e):await r.apiKey;t[e]=s}};function at(t,e,r=""){e!=null&&(typeof e=="object"?Array.isArray(e)?e.forEach(s=>at(t,s,r)):Object.keys(e).forEach(s=>at(t,e[s],`${r}${r!==""?".":""}${s}`)):t.has(r)?t.append(r,e):t.set(r,e))}const V=function(t,...e){const r=new URLSearchParams(t.search);at(r,e),t.search=r.toString()},x=function(t,e,r){const s=typeof t!="string";return(s&&r&&r.isJsonMime?r.isJsonMime(e.headers["Content-Type"]):s)?JSON.stringify(t!==void 0?t:{}):t||""},S=function(t){return t.pathname+t.search+t.hash},A=function(t,e,r,s){return(o=e,a=r)=>{const n={...t.options,url:(o.defaults.baseURL?"":s?.basePath??a)+t.url};return o.request(n)}},ao=function(t){return{updateController:async(e,r={})=>{d("updateController","body",e);const s="/controller",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"PUT",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},oo=function(t){const 
e=ao(t);return{async updateController(r,s){const o=await e.updateController(r,s),a=t?.serverIndex??0,n=P["ControllerApi.updateController"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Dt extends H{updateController(e,r){return oo(this.configuration).updateController(e,r).then(s=>s(this.axios,this.basePath))}}const no=function(t){return{controllerInfo:async(e={})=>{const r="/controller-info",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}}}},lo=function(t){const e=no(t);return{async controllerInfo(r){const s=await e.controllerInfo(r),o=t?.serverIndex??0,a=P["ControllerInfoApi.controllerInfo"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)}}};class Lt extends H{controllerInfo(e){return lo(this.configuration).controllerInfo(e).then(r=>r(this.axios,this.basePath))}}const io=function(t){return{createCredentials:async(e,r={})=>{d("createCredentials","body",e);const s="/github/credentials",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createGiteaCredentials:async(e,r={})=>{d("createGiteaCredentials","body",e);const s="/gitea/credentials",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},deleteCredentials:async(e,r={})=>{d("deleteCredentials","id",e);const s="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteGiteaCredentials:async(e,r={})=>{d("deleteGiteaCredentials","id",e);const s="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getCredentials:async(e,r={})=>{d("getCredentials","id",e);const s="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGiteaCredentials:async(e,r={})=>{d("getGiteaCredentials","id",e);const s="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listCredentials:async(e={})=>{const r="/github/credentials",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listGiteaCredentials:async(e={})=>{const r="/gitea/credentials",s=new URL(r,b);let 
o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateCredentials:async(e,r,s={})=>{d("updateCredentials","id",e),d("updateCredentials","body",r);const o="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateGiteaCredentials:async(e,r,s={})=>{d("updateGiteaCredentials","id",e),d("updateGiteaCredentials","body",r);const o="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},J=function(t){const e=io(t);return{async createCredentials(r,s){const o=await e.createCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.createCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createGiteaCredentials(r,s){const o=await e.createGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.createGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteCredentials(r,s){const o=await e.deleteCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.deleteCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGiteaCredentials(r,s){const o=await e.deleteGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.deleteGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getCredentials(r,s){const o=await e.getCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.getCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGiteaCredentials(r,s){const o=await e.getGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.getGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listCredentials(r){const s=await e.listCredentials(r),o=t?.serverIndex??0,a=P["CredentialsApi.listCredentials"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listGiteaCredentials(r){const s=await e.listGiteaCredentials(r),o=t?.serverIndex??0,a=P["CredentialsApi.listGiteaCredentials"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateCredentials(r,s,o){const a=await e.updateCredentials(r,s,o),n=t?.serverIndex??0,l=P["CredentialsApi.updateCredentials"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateGiteaCredentials(r,s,o){const a=await e.updateGiteaCredentials(r,s,o),n=t?.serverIndex??0,l=P["CredentialsApi.updateGiteaCredentials"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class jt extends H{createCredentials(e,r){return J(this.configuration).createCredentials(e,r).then(s=>s(this.axios,this.basePath))}createGiteaCredentials(e,r){return J(this.configuration).createGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}deleteCredentials(e,r){return J(this.configuration).deleteCredentials(e,r).then(s=>s(this.axios,this.basePath))}deleteGiteaCredentials(e,r){return J(this.configuration).deleteGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}getCredentials(e,r){return J(this.configuration).getCredentials(e,r).then(s=>s(this.axios,this.basePath))}getGiteaCredentials(e,r){return 
J(this.configuration).getGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}listCredentials(e){return J(this.configuration).listCredentials(e).then(r=>r(this.axios,this.basePath))}listGiteaCredentials(e){return J(this.configuration).listGiteaCredentials(e).then(r=>r(this.axios,this.basePath))}updateCredentials(e,r,s){return J(this.configuration).updateCredentials(e,r,s).then(o=>o(this.axios,this.basePath))}updateGiteaCredentials(e,r,s){return J(this.configuration).updateGiteaCredentials(e,r,s).then(o=>o(this.axios,this.basePath))}}const co=function(t){return{createGiteaEndpoint:async(e,r={})=>{d("createGiteaEndpoint","body",e);const s="/gitea/endpoints",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createGithubEndpoint:async(e,r={})=>{d("createGithubEndpoint","body",e);const s="/github/endpoints",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},deleteGiteaEndpoint:async(e,r={})=>{d("deleteGiteaEndpoint","name",e);const s="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteGithubEndpoint:async(e,r={})=>{d("deleteGithubEndpoint","name",e);const s="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGiteaEndpoint:async(e,r={})=>{d("getGiteaEndpoint","name",e);const s="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGithubEndpoint:async(e,r={})=>{d("getGithubEndpoint","name",e);const s="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listGiteaEndpoints:async(e={})=>{const r="/gitea/endpoints",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listGithubEndpoints:async(e={})=>{const r="/github/endpoints",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateGiteaEndpoint:async(e,r,s={})=>{d("updateGiteaEndpoint","name",e),d("updateGiteaEndpoint","body",r);const o="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),a=new URL(o,b);let 
n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateGithubEndpoint:async(e,r,s={})=>{d("updateGithubEndpoint","name",e),d("updateGithubEndpoint","body",r);const o="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},K=function(t){const e=co(t);return{async createGiteaEndpoint(r,s){const o=await e.createGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.createGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createGithubEndpoint(r,s){const o=await e.createGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.createGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGiteaEndpoint(r,s){const o=await e.deleteGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.deleteGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGithubEndpoint(r,s){const o=await e.deleteGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.deleteGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGiteaEndpoint(r,s){const o=await e.getGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.getGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGithubEndpoint(r,s){const o=await e.getGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.getGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listGiteaEndpoints(r){const s=await e.listGiteaEndpoints(r),o=t?.serverIndex??0,a=P["EndpointsApi.listGiteaEndpoints"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listGithubEndpoints(r){const s=await e.listGithubEndpoints(r),o=t?.serverIndex??0,a=P["EndpointsApi.listGithubEndpoints"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateGiteaEndpoint(r,s,o){const a=await e.updateGiteaEndpoint(r,s,o),n=t?.serverIndex??0,l=P["EndpointsApi.updateGiteaEndpoint"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateGithubEndpoint(r,s,o){const a=await e.updateGithubEndpoint(r,s,o),n=t?.serverIndex??0,l=P["EndpointsApi.updateGithubEndpoint"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class kt extends H{createGiteaEndpoint(e,r){return K(this.configuration).createGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}createGithubEndpoint(e,r){return K(this.configuration).createGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}deleteGiteaEndpoint(e,r){return K(this.configuration).deleteGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}deleteGithubEndpoint(e,r){return K(this.configuration).deleteGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}getGiteaEndpoint(e,r){return K(this.configuration).getGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}getGithubEndpoint(e,r){return K(this.configuration).getGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}listGiteaEndpoints(e){return K(this.configuration).listGiteaEndpoints(e).then(r=>r(this.axios,this.basePath))}listGithubEndpoints(e){return K(this.configuration).listGithubEndpoints(e).then(r=>r(this.axios,this.basePath))}updateGiteaEndpoint(e,r,s){return 
K(this.configuration).updateGiteaEndpoint(e,r,s).then(o=>o(this.axios,this.basePath))}updateGithubEndpoint(e,r,s){return K(this.configuration).updateGithubEndpoint(e,r,s).then(o=>o(this.axios,this.basePath))}}const po=function(t){return{createEnterprise:async(e,r={})=>{d("createEnterprise","body",e);const s="/enterprises",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createEnterprisePool:async(e,r,s={})=>{d("createEnterprisePool","enterpriseID",e),d("createEnterprisePool","body",r);const o="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createEnterpriseScaleSet:async(e,r,s={})=>{d("createEnterpriseScaleSet","enterpriseID",e),d("createEnterpriseScaleSet","body",r);const o="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteEnterprise:async(e,r={})=>{d("deleteEnterprise","enterpriseID",e);const s="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteEnterprisePool:async(e,r,s={})=>{d("deleteEnterprisePool","enterpriseID",e),d("deleteEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getEnterprise:async(e,r={})=>{d("getEnterprise","enterpriseID",e);const s="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getEnterprisePool:async(e,r,s={})=>{d("getEnterprisePool","enterpriseID",e),d("getEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},listEnterpriseInstances:async(e,r={})=>{d("listEnterpriseInstances","enterpriseID",e);const s="/enterprises/{enterpriseID}/instances".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let 
a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterprisePools:async(e,r={})=>{d("listEnterprisePools","enterpriseID",e);const s="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseScaleSets:async(e,r={})=>{d("listEnterpriseScaleSets","enterpriseID",e);const s="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterprises:async(e,r,s={})=>{const o="/enterprises",a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),e!==void 0&&(c.name=e),r!==void 0&&(c.endpoint=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},updateEnterprise:async(e,r,s={})=>{d("updateEnterprise","enterpriseID",e),d("updateEnterprise","body",r);const o="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateEnterprisePool:async(e,r,s,o={})=>{d("updateEnterprisePool","enterpriseID",e),d("updateEnterprisePool","poolID",r),d("updateEnterprisePool","body",s);const a="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},$=function(t){const e=po(t);return{async createEnterprise(r,s){const o=await e.createEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.createEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createEnterprisePool(r,s,o){const a=await e.createEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.createEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createEnterpriseScaleSet(r,s,o){const a=await e.createEnterpriseScaleSet(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.createEnterpriseScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteEnterprise(r,s){const o=await e.deleteEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.deleteEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteEnterprisePool(r,s,o){const a=await e.deleteEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.deleteEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getEnterprise(r,s){const o=await e.getEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.getEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getEnterprisePool(r,s,o){const a=await 
e.getEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.getEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listEnterpriseInstances(r,s){const o=await e.listEnterpriseInstances(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterpriseInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterprisePools(r,s){const o=await e.listEnterprisePools(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterprisePools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseScaleSets(r,s){const o=await e.listEnterpriseScaleSets(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterpriseScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterprises(r,s,o){const a=await e.listEnterprises(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.listEnterprises"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateEnterprise(r,s,o){const a=await e.updateEnterprise(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.updateEnterprise"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateEnterprisePool(r,s,o,a){const n=await e.updateEnterprisePool(r,s,o,a),l=t?.serverIndex??0,i=P["EnterprisesApi.updateEnterprisePool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class zt extends H{createEnterprise(e,r){return $(this.configuration).createEnterprise(e,r).then(s=>s(this.axios,this.basePath))}createEnterprisePool(e,r,s){return $(this.configuration).createEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}createEnterpriseScaleSet(e,r,s){return $(this.configuration).createEnterpriseScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteEnterprise(e,r){return $(this.configuration).deleteEnterprise(e,r).then(s=>s(this.axios,this.basePath))}deleteEnterprisePool(e,r,s){return $(this.configuration).deleteEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}getEnterprise(e,r){return $(this.configuration).getEnterprise(e,r).then(s=>s(this.axios,this.basePath))}getEnterprisePool(e,r,s){return $(this.configuration).getEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}listEnterpriseInstances(e,r){return $(this.configuration).listEnterpriseInstances(e,r).then(s=>s(this.axios,this.basePath))}listEnterprisePools(e,r){return $(this.configuration).listEnterprisePools(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseScaleSets(e,r){return $(this.configuration).listEnterpriseScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listEnterprises(e,r,s){return $(this.configuration).listEnterprises(e,r,s).then(o=>o(this.axios,this.basePath))}updateEnterprise(e,r,s){return $(this.configuration).updateEnterprise(e,r,s).then(o=>o(this.axios,this.basePath))}updateEnterprisePool(e,r,s,o){return $(this.configuration).updateEnterprisePool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const ho=function(t){return{firstRun:async(e,r={})=>{d("firstRun","body",e);const s="/first-run",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},uo=function(t){const e=ho(t);return{async firstRun(r,s){const o=await e.firstRun(r,s),a=t?.serverIndex??0,n=P["FirstRunApi.firstRun"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Ft extends H{firstRun(e,r){return uo(this.configuration).firstRun(e,r).then(s=>s(this.axios,this.basePath))}}const Oo=function(t){return{getOrgWebhookInfo:async(e,r={})=>{d("getOrgWebhookInfo","orgID",e);const 
s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoWebhookInfo:async(e,r={})=>{d("getRepoWebhookInfo","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installOrgWebhook:async(e,r,s={})=>{d("installOrgWebhook","orgID",e),d("installOrgWebhook","body",r);const o="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},installRepoWebhook:async(e,r,s={})=>{d("installRepoWebhook","repoID",e),d("installRepoWebhook","body",r);const o="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},uninstallOrgWebhook:async(e,r={})=>{d("uninstallOrgWebhook","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},uninstallRepoWebhook:async(e,r={})=>{d("uninstallRepoWebhook","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}}}},pe=function(t){const e=Oo(t);return{async getOrgWebhookInfo(r,s){const o=await e.getOrgWebhookInfo(r,s),a=t?.serverIndex??0,n=P["HooksApi.getOrgWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoWebhookInfo(r,s){const o=await e.getRepoWebhookInfo(r,s),a=t?.serverIndex??0,n=P["HooksApi.getRepoWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installOrgWebhook(r,s,o){const a=await e.installOrgWebhook(r,s,o),n=t?.serverIndex??0,l=P["HooksApi.installOrgWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async installRepoWebhook(r,s,o){const a=await e.installRepoWebhook(r,s,o),n=t?.serverIndex??0,l=P["HooksApi.installRepoWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async uninstallOrgWebhook(r,s){const o=await e.uninstallOrgWebhook(r,s),a=t?.serverIndex??0,n=P["HooksApi.uninstallOrgWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async uninstallRepoWebhook(r,s){const o=await e.uninstallRepoWebhook(r,s),a=t?.serverIndex??0,n=P["HooksApi.uninstallRepoWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Gt extends H{getOrgWebhookInfo(e,r){return 
pe(this.configuration).getOrgWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}getRepoWebhookInfo(e,r){return pe(this.configuration).getRepoWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installOrgWebhook(e,r,s){return pe(this.configuration).installOrgWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}installRepoWebhook(e,r,s){return pe(this.configuration).installRepoWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}uninstallOrgWebhook(e,r){return pe(this.configuration).uninstallOrgWebhook(e,r).then(s=>s(this.axios,this.basePath))}uninstallRepoWebhook(e,r){return pe(this.configuration).uninstallRepoWebhook(e,r).then(s=>s(this.axios,this.basePath))}}const Po=function(t){return{deleteInstance:async(e,r,s,o={})=>{d("deleteInstance","instanceName",e);const a="/instances/{instanceName}".replace("{instanceName}",encodeURIComponent(String(e))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"DELETE",...l,...o},c={},p={};await m(c,"Authorization",t),r!==void 0&&(p.forceRemove=r),s!==void 0&&(p.bypassGHUnauthorized=s),V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},{url:S(n),options:i}},getInstance:async(e,r={})=>{d("getInstance","instanceName",e);const s="/instances/{instanceName}".replace("{instanceName}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseInstances:async(e,r={})=>{d("listEnterpriseInstances","enterpriseID",e);const s="/enterprises/{enterpriseID}/instances".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listInstances:async(e={})=>{const r="/instances",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listOrgInstances:async(e,r={})=>{d("listOrgInstances","orgID",e);const s="/organizations/{orgID}/instances".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listPoolInstances:async(e,r={})=>{d("listPoolInstances","poolID",e);const s="/pools/{poolID}/instances".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoInstances:async(e,r={})=>{d("listRepoInstances","repoID",e);const s="/repositories/{repoID}/instances".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listScaleSetInstances:async(e,r={})=>{d("listScaleSetInstances","scalesetID",e);const s="/scalesets/{scalesetID}/instances".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const 
n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}}}},ae=function(t){const e=Po(t);return{async deleteInstance(r,s,o,a){const n=await e.deleteInstance(r,s,o,a),l=t?.serverIndex??0,i=P["InstancesApi.deleteInstance"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async getInstance(r,s){const o=await e.getInstance(r,s),a=t?.serverIndex??0,n=P["InstancesApi.getInstance"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseInstances(r,s){const o=await e.listEnterpriseInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listEnterpriseInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listInstances(r){const s=await e.listInstances(r),o=t?.serverIndex??0,a=P["InstancesApi.listInstances"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listOrgInstances(r,s){const o=await e.listOrgInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listOrgInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listPoolInstances(r,s){const o=await e.listPoolInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listPoolInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoInstances(r,s){const o=await e.listRepoInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listRepoInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listScaleSetInstances(r,s){const o=await e.listScaleSetInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listScaleSetInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class _t extends H{deleteInstance(e,r,s,o){return ae(this.configuration).deleteInstance(e,r,s,o).then(a=>a(this.axios,this.basePath))}getInstance(e,r){return ae(this.configuration).getInstance(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseInstances(e,r){return ae(this.configuration).listEnterpriseInstances(e,r).then(s=>s(this.axios,this.basePath))}listInstances(e){return ae(this.configuration).listInstances(e).then(r=>r(this.axios,this.basePath))}listOrgInstances(e,r){return ae(this.configuration).listOrgInstances(e,r).then(s=>s(this.axios,this.basePath))}listPoolInstances(e,r){return ae(this.configuration).listPoolInstances(e,r).then(s=>s(this.axios,this.basePath))}listRepoInstances(e,r){return ae(this.configuration).listRepoInstances(e,r).then(s=>s(this.axios,this.basePath))}listScaleSetInstances(e,r){return ae(this.configuration).listScaleSetInstances(e,r).then(s=>s(this.axios,this.basePath))}}const bo=function(t){return{login:async(e,r={})=>{d("login","body",e);const s="/auth/login",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},mo=function(t){const e=bo(t);return{async login(r,s){const o=await e.login(r,s),a=t?.serverIndex??0,n=P["LoginApi.login"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class qt extends H{login(e,r){return mo(this.configuration).login(e,r).then(s=>s(this.axios,this.basePath))}}const Vo=function(t){return{createOrg:async(e,r={})=>{d("createOrg","body",e);const s="/organizations",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return 
n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createOrgPool:async(e,r,s={})=>{d("createOrgPool","orgID",e),d("createOrgPool","body",r);const o="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgScaleSet:async(e,r,s={})=>{d("createOrgScaleSet","orgID",e),d("createOrgScaleSet","body",r);const o="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteOrg:async(e,r,s={})=>{d("deleteOrg","orgID",e);const o="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),r!==void 0&&(c.keepWebhook=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteOrgPool:async(e,r,s={})=>{d("deleteOrgPool","orgID",e),d("deleteOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrg:async(e,r={})=>{d("getOrg","orgID",e);const s="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getOrgPool:async(e,r,s={})=>{d("getOrgPool","orgID",e),d("getOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrgWebhookInfo:async(e,r={})=>{d("getOrgWebhookInfo","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installOrgWebhook:async(e,r,s={})=>{d("installOrgWebhook","orgID",e),d("installOrgWebhook","body",r);const o="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},listOrgInstances:async(e,r={})=>{d("listOrgInstances","orgID",e);const 
s="/organizations/{orgID}/instances".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgPools:async(e,r={})=>{d("listOrgPools","orgID",e);const s="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgScaleSets:async(e,r={})=>{d("listOrgScaleSets","orgID",e);const s="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgs:async(e,r,s={})=>{const o="/organizations",a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),e!==void 0&&(c.name=e),r!==void 0&&(c.endpoint=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},uninstallOrgWebhook:async(e,r={})=>{d("uninstallOrgWebhook","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateOrg:async(e,r,s={})=>{d("updateOrg","orgID",e),d("updateOrg","body",r);const o="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateOrgPool:async(e,r,s,o={})=>{d("updateOrgPool","orgID",e),d("updateOrgPool","poolID",r),d("updateOrgPool","body",s);const a="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},z=function(t){const e=Vo(t);return{async createOrg(r,s){const o=await e.createOrg(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.createOrg"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createOrgPool(r,s,o){const a=await e.createOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.createOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgScaleSet(r,s,o){const a=await e.createOrgScaleSet(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.createOrgScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrg(r,s,o){const a=await e.deleteOrg(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.deleteOrg"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrgPool(r,s,o){const a=await 
e.deleteOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.deleteOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrg(r,s){const o=await e.getOrg(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.getOrg"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getOrgPool(r,s,o){const a=await e.getOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.getOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrgWebhookInfo(r,s){const o=await e.getOrgWebhookInfo(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.getOrgWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installOrgWebhook(r,s,o){const a=await e.installOrgWebhook(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.installOrgWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listOrgInstances(r,s){const o=await e.listOrgInstances(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgPools(r,s){const o=await e.listOrgPools(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgScaleSets(r,s){const o=await e.listOrgScaleSets(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgs(r,s,o){const a=await e.listOrgs(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.listOrgs"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async uninstallOrgWebhook(r,s){const o=await e.uninstallOrgWebhook(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.uninstallOrgWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateOrg(r,s,o){const a=await e.updateOrg(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.updateOrg"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateOrgPool(r,s,o,a){const n=await e.updateOrgPool(r,s,o,a),l=t?.serverIndex??0,i=P["OrganizationsApi.updateOrgPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class Ht extends H{createOrg(e,r){return z(this.configuration).createOrg(e,r).then(s=>s(this.axios,this.basePath))}createOrgPool(e,r,s){return z(this.configuration).createOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgScaleSet(e,r,s){return z(this.configuration).createOrgScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrg(e,r,s){return z(this.configuration).deleteOrg(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrgPool(e,r,s){return z(this.configuration).deleteOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrg(e,r){return z(this.configuration).getOrg(e,r).then(s=>s(this.axios,this.basePath))}getOrgPool(e,r,s){return z(this.configuration).getOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrgWebhookInfo(e,r){return z(this.configuration).getOrgWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installOrgWebhook(e,r,s){return z(this.configuration).installOrgWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}listOrgInstances(e,r){return z(this.configuration).listOrgInstances(e,r).then(s=>s(this.axios,this.basePath))}listOrgPools(e,r){return z(this.configuration).listOrgPools(e,r).then(s=>s(this.axios,this.basePath))}listOrgScaleSets(e,r){return z(this.configuration).listOrgScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listOrgs(e,r,s){return z(this.configuration).listOrgs(e,r,s).then(o=>o(this.axios,this.basePath))}uninstallOrgWebhook(e,r){return z(this.configuration).uninstallOrgWebhook(e,r).then(s=>s(this.axios,this.basePath))}updateOrg(e,r,s){return z(this.configuration).updateOrg(e,r,s).then(o=>o(this.axios,this.basePath))}updateOrgPool(e,r,s,o){return 
z(this.configuration).updateOrgPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const So=function(t){return{createEnterprisePool:async(e,r,s={})=>{d("createEnterprisePool","enterpriseID",e),d("createEnterprisePool","body",r);const o="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgPool:async(e,r,s={})=>{d("createOrgPool","orgID",e),d("createOrgPool","body",r);const o="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoPool:async(e,r,s={})=>{d("createRepoPool","repoID",e),d("createRepoPool","body",r);const o="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteEnterprisePool:async(e,r,s={})=>{d("deleteEnterprisePool","enterpriseID",e),d("deleteEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteOrgPool:async(e,r,s={})=>{d("deleteOrgPool","orgID",e),d("deleteOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deletePool:async(e,r={})=>{d("deletePool","poolID",e);const s="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteRepoPool:async(e,r,s={})=>{d("deleteRepoPool","repoID",e),d("deleteRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getEnterprisePool:async(e,r,s={})=>{d("getEnterprisePool","enterpriseID",e),d("getEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new 
URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrgPool:async(e,r,s={})=>{d("getOrgPool","orgID",e),d("getOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getPool:async(e,r={})=>{d("getPool","poolID",e);const s="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoPool:async(e,r,s={})=>{d("getRepoPool","repoID",e),d("getRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},listEnterprisePools:async(e,r={})=>{d("listEnterprisePools","enterpriseID",e);const s="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgPools:async(e,r={})=>{d("listOrgPools","orgID",e);const s="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listPools:async(e={})=>{const r="/pools",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listRepoPools:async(e,r={})=>{d("listRepoPools","repoID",e);const s="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateEnterprisePool:async(e,r,s,o={})=>{d("updateEnterprisePool","enterpriseID",e),d("updateEnterprisePool","poolID",r),d("updateEnterprisePool","body",s);const a="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}},updateOrgPool:async(e,r,s,o={})=>{d("updateOrgPool","orgID",e),d("updateOrgPool","poolID",r),d("updateOrgPool","body",s);const 
a="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}},updatePool:async(e,r,s={})=>{d("updatePool","poolID",e),d("updatePool","body",r);const o="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateRepoPool:async(e,r,s,o={})=>{d("updateRepoPool","repoID",e),d("updateRepoPool","poolID",r),d("updateRepoPool","body",s);const a="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},j=function(t){const e=So(t);return{async createEnterprisePool(r,s,o){const a=await e.createEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgPool(r,s,o){const a=await e.createOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createRepoPool(r,s,o){const a=await e.createRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteEnterprisePool(r,s,o){const a=await e.deleteEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrgPool(r,s,o){const a=await e.deleteOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deletePool(r,s){const o=await e.deletePool(r,s),a=t?.serverIndex??0,n=P["PoolsApi.deletePool"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteRepoPool(r,s,o){const a=await e.deleteRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getEnterprisePool(r,s,o){const a=await e.getEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrgPool(r,s,o){const a=await e.getOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getPool(r,s){const o=await e.getPool(r,s),a=t?.serverIndex??0,n=P["PoolsApi.getPool"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoPool(r,s,o){const a=await e.getRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listEnterprisePools(r,s){const o=await e.listEnterprisePools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listEnterprisePools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgPools(r,s){const o=await e.listOrgPools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listOrgPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listPools(r){const s=await 
e.listPools(r),o=t?.serverIndex??0,a=P["PoolsApi.listPools"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listRepoPools(r,s){const o=await e.listRepoPools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listRepoPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateEnterprisePool(r,s,o,a){const n=await e.updateEnterprisePool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateEnterprisePool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async updateOrgPool(r,s,o,a){const n=await e.updateOrgPool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateOrgPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async updatePool(r,s,o){const a=await e.updatePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.updatePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateRepoPool(r,s,o,a){const n=await e.updateRepoPool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateRepoPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class $t extends H{createEnterprisePool(e,r,s){return j(this.configuration).createEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgPool(e,r,s){return j(this.configuration).createOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoPool(e,r,s){return j(this.configuration).createRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}deleteEnterprisePool(e,r,s){return j(this.configuration).deleteEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrgPool(e,r,s){return j(this.configuration).deleteOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}deletePool(e,r){return j(this.configuration).deletePool(e,r).then(s=>s(this.axios,this.basePath))}deleteRepoPool(e,r,s){return j(this.configuration).deleteRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getEnterprisePool(e,r,s){return j(this.configuration).getEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrgPool(e,r,s){return j(this.configuration).getOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getPool(e,r){return j(this.configuration).getPool(e,r).then(s=>s(this.axios,this.basePath))}getRepoPool(e,r,s){return j(this.configuration).getRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}listEnterprisePools(e,r){return j(this.configuration).listEnterprisePools(e,r).then(s=>s(this.axios,this.basePath))}listOrgPools(e,r){return j(this.configuration).listOrgPools(e,r).then(s=>s(this.axios,this.basePath))}listPools(e){return j(this.configuration).listPools(e).then(r=>r(this.axios,this.basePath))}listRepoPools(e,r){return j(this.configuration).listRepoPools(e,r).then(s=>s(this.axios,this.basePath))}updateEnterprisePool(e,r,s,o){return j(this.configuration).updateEnterprisePool(e,r,s,o).then(a=>a(this.axios,this.basePath))}updateOrgPool(e,r,s,o){return j(this.configuration).updateOrgPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}updatePool(e,r,s){return j(this.configuration).updatePool(e,r,s).then(o=>o(this.axios,this.basePath))}updateRepoPool(e,r,s,o){return j(this.configuration).updateRepoPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const Ao=function(t){return{listProviders:async(e={})=>{const r="/providers",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}}}},Ro=function(t){const e=Ao(t);return{async listProviders(r){const s=await e.listProviders(r),o=t?.serverIndex??0,a=P["ProvidersApi.listProviders"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)}}};class Nt extends H{listProviders(e){return 
Ro(this.configuration).listProviders(e).then(r=>r(this.axios,this.basePath))}}const fo=function(t){return{createRepo:async(e,r={})=>{d("createRepo","body",e);const s="/repositories",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createRepoPool:async(e,r,s={})=>{d("createRepoPool","repoID",e),d("createRepoPool","body",r);const o="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoScaleSet:async(e,r,s={})=>{d("createRepoScaleSet","repoID",e),d("createRepoScaleSet","body",r);const o="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteRepo:async(e,r,s={})=>{d("deleteRepo","repoID",e);const o="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),r!==void 0&&(c.keepWebhook=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteRepoPool:async(e,r,s={})=>{d("deleteRepoPool","repoID",e),d("deleteRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getRepo:async(e,r={})=>{d("getRepo","repoID",e);const s="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoPool:async(e,r,s={})=>{d("getRepoPool","repoID",e),d("getRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getRepoWebhookInfo:async(e,r={})=>{d("getRepoWebhookInfo","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installRepoWebhook:async(e,r,s={})=>{d("installRepoWebhook","repoID",e),d("installRepoWebhook","body",r);const 
o="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},listRepoInstances:async(e,r={})=>{d("listRepoInstances","repoID",e);const s="/repositories/{repoID}/instances".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoPools:async(e,r={})=>{d("listRepoPools","repoID",e);const s="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoScaleSets:async(e,r={})=>{d("listRepoScaleSets","repoID",e);const s="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepos:async(e,r,s,o={})=>{const a="/repositories",n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"GET",...l,...o},c={},p={};await m(c,"Authorization",t),e!==void 0&&(p.owner=e),r!==void 0&&(p.name=r),s!==void 0&&(p.endpoint=s),V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},{url:S(n),options:i}},uninstallRepoWebhook:async(e,r={})=>{d("uninstallRepoWebhook","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateRepo:async(e,r,s={})=>{d("updateRepo","repoID",e),d("updateRepo","body",r);const o="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateRepoPool:async(e,r,s,o={})=>{d("updateRepoPool","repoID",e),d("updateRepoPool","poolID",r),d("updateRepoPool","body",s);const a="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},F=function(t){const e=fo(t);return{async createRepo(r,s){const o=await e.createRepo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.createRepo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createRepoPool(r,s,o){const a=await e.createRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.createRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async 
createRepoScaleSet(r,s,o){const a=await e.createRepoScaleSet(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.createRepoScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteRepo(r,s,o){const a=await e.deleteRepo(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.deleteRepo"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteRepoPool(r,s,o){const a=await e.deleteRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.deleteRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getRepo(r,s){const o=await e.getRepo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.getRepo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoPool(r,s,o){const a=await e.getRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.getRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getRepoWebhookInfo(r,s){const o=await e.getRepoWebhookInfo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.getRepoWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installRepoWebhook(r,s,o){const a=await e.installRepoWebhook(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.installRepoWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listRepoInstances(r,s){const o=await e.listRepoInstances(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoPools(r,s){const o=await e.listRepoPools(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoScaleSets(r,s){const o=await e.listRepoScaleSets(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepos(r,s,o,a){const n=await e.listRepos(r,s,o,a),l=t?.serverIndex??0,i=P["RepositoriesApi.listRepos"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async uninstallRepoWebhook(r,s){const o=await e.uninstallRepoWebhook(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.uninstallRepoWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateRepo(r,s,o){const a=await e.updateRepo(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.updateRepo"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateRepoPool(r,s,o,a){const n=await e.updateRepoPool(r,s,o,a),l=t?.serverIndex??0,i=P["RepositoriesApi.updateRepoPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class Wt extends H{createRepo(e,r){return F(this.configuration).createRepo(e,r).then(s=>s(this.axios,this.basePath))}createRepoPool(e,r,s){return F(this.configuration).createRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoScaleSet(e,r,s){return F(this.configuration).createRepoScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteRepo(e,r,s){return F(this.configuration).deleteRepo(e,r,s).then(o=>o(this.axios,this.basePath))}deleteRepoPool(e,r,s){return F(this.configuration).deleteRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getRepo(e,r){return F(this.configuration).getRepo(e,r).then(s=>s(this.axios,this.basePath))}getRepoPool(e,r,s){return F(this.configuration).getRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getRepoWebhookInfo(e,r){return F(this.configuration).getRepoWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installRepoWebhook(e,r,s){return F(this.configuration).installRepoWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}listRepoInstances(e,r){return F(this.configuration).listRepoInstances(e,r).then(s=>s(this.axios,this.basePath))}listRepoPools(e,r){return F(this.configuration).listRepoPools(e,r).then(s=>s(this.axios,this.basePath))}listRepoScaleSets(e,r){return 
F(this.configuration).listRepoScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listRepos(e,r,s,o){return F(this.configuration).listRepos(e,r,s,o).then(a=>a(this.axios,this.basePath))}uninstallRepoWebhook(e,r){return F(this.configuration).uninstallRepoWebhook(e,r).then(s=>s(this.axios,this.basePath))}updateRepo(e,r,s){return F(this.configuration).updateRepo(e,r,s).then(o=>o(this.axios,this.basePath))}updateRepoPool(e,r,s,o){return F(this.configuration).updateRepoPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const yo=function(t){return{createEnterpriseScaleSet:async(e,r,s={})=>{d("createEnterpriseScaleSet","enterpriseID",e),d("createEnterpriseScaleSet","body",r);const o="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgScaleSet:async(e,r,s={})=>{d("createOrgScaleSet","orgID",e),d("createOrgScaleSet","body",r);const o="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoScaleSet:async(e,r,s={})=>{d("createRepoScaleSet","repoID",e),d("createRepoScaleSet","body",r);const o="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteScaleSet:async(e,r={})=>{d("deleteScaleSet","scalesetID",e);const s="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getScaleSet:async(e,r={})=>{d("getScaleSet","scalesetID",e);const s="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseScaleSets:async(e,r={})=>{d("listEnterpriseScaleSets","enterpriseID",e);const s="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgScaleSets:async(e,r={})=>{d("listOrgScaleSets","orgID",e);const s="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return 
n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoScaleSets:async(e,r={})=>{d("listRepoScaleSets","repoID",e);const s="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listScalesets:async(e={})=>{const r="/scalesets",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateScaleSet:async(e,r,s={})=>{d("updateScaleSet","scalesetID",e),d("updateScaleSet","body",r);const o="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},X=function(t){const e=yo(t);return{async createEnterpriseScaleSet(r,s,o){const a=await e.createEnterpriseScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createEnterpriseScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgScaleSet(r,s,o){const a=await e.createOrgScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createOrgScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createRepoScaleSet(r,s,o){const a=await e.createRepoScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createRepoScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteScaleSet(r,s){const o=await e.deleteScaleSet(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.deleteScaleSet"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getScaleSet(r,s){const o=await e.getScaleSet(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.getScaleSet"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseScaleSets(r,s){const o=await e.listEnterpriseScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listEnterpriseScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgScaleSets(r,s){const o=await e.listOrgScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listOrgScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoScaleSets(r,s){const o=await e.listRepoScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listRepoScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listScalesets(r){const s=await e.listScalesets(r),o=t?.serverIndex??0,a=P["ScalesetsApi.listScalesets"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateScaleSet(r,s,o){const a=await e.updateScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.updateScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class Qt extends H{createEnterpriseScaleSet(e,r,s){return X(this.configuration).createEnterpriseScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgScaleSet(e,r,s){return X(this.configuration).createOrgScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoScaleSet(e,r,s){return X(this.configuration).createRepoScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteScaleSet(e,r){return X(this.configuration).deleteScaleSet(e,r).then(s=>s(this.axios,this.basePath))}getScaleSet(e,r){return X(this.configuration).getScaleSet(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseScaleSets(e,r){return 
X(this.configuration).listEnterpriseScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listOrgScaleSets(e,r){return X(this.configuration).listOrgScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listRepoScaleSets(e,r){return X(this.configuration).listRepoScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listScalesets(e){return X(this.configuration).listScalesets(e).then(r=>r(this.axios,this.basePath))}updateScaleSet(e,r,s){return X(this.configuration).updateScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}}class Mt{apiKey;username;password;accessToken;basePath;serverIndex;baseOptions;formDataCtor;constructor(e={}){this.apiKey=e.apiKey,this.username=e.username,this.password=e.password,this.accessToken=e.accessToken,this.basePath=e.basePath,this.serverIndex=e.serverIndex,this.baseOptions={...e.baseOptions,headers:{...e.baseOptions?.headers}},this.formDataCtor=e.formDataCtor}isJsonMime(e){const r=new RegExp("^(application/json|[^;/ ]+/[^;/ ]+[+]json)[ ]*(;.*)?$","i");return e!==null&&(r.test(e)||e.toLowerCase()==="application/json-patch+json")}}class gr{baseUrl;token;config;isDevelopmentMode(){return typeof window>"u"?!1:window.location.port==="5173"}loginApi;controllerInfoApi;controllerApi;endpointsApi;credentialsApi;repositoriesApi;organizationsApi;enterprisesApi;poolsApi;scaleSetsApi;instancesApi;providersApi;firstRunApi;hooksApi;constructor(e=""){this.baseUrl=e||window.location.origin;const r=this.isDevelopmentMode();this.config=new Mt({basePath:`${this.baseUrl}/api/v1`,accessToken:()=>this.token||"",baseOptions:{withCredentials:!r}}),this.loginApi=new qt(this.config),this.controllerInfoApi=new Lt(this.config),this.controllerApi=new Dt(this.config),this.endpointsApi=new kt(this.config),this.credentialsApi=new jt(this.config),this.repositoriesApi=new Wt(this.config),this.organizationsApi=new Ht(this.config),this.enterprisesApi=new zt(this.config),this.poolsApi=new $t(this.config),this.scaleSetsApi=new Qt(this.config),this.instancesApi=new _t(this.config),this.providersApi=new Nt(this.config),this.firstRunApi=new Ft(this.config),this.hooksApi=new Gt(this.config)}setToken(e){this.token=e;const r=this.isDevelopmentMode();this.config=new Mt({basePath:`${this.baseUrl}/api/v1`,accessToken:()=>e,baseOptions:{withCredentials:!r}}),this.loginApi=new qt(this.config),this.controllerInfoApi=new Lt(this.config),this.controllerApi=new Dt(this.config),this.endpointsApi=new kt(this.config),this.credentialsApi=new jt(this.config),this.repositoriesApi=new Wt(this.config),this.organizationsApi=new Ht(this.config),this.enterprisesApi=new zt(this.config),this.poolsApi=new $t(this.config),this.scaleSetsApi=new Qt(this.config),this.instancesApi=new _t(this.config),this.providersApi=new Nt(this.config),this.firstRunApi=new Ft(this.config),this.hooksApi=new Gt(this.config)}async login(e){const r={username:e.username,password:e.password},o=(await this.loginApi.login(r)).data.token;if(o)return this.setToken(o),{token:o};throw new Error("Login failed")}async getControllerInfo(){return(await this.controllerInfoApi.controllerInfo()).data}async listGithubEndpoints(){return(await this.endpointsApi.listGithubEndpoints()).data||[]}async getGithubEndpoint(e){return(await this.endpointsApi.getGithubEndpoint(e)).data}async createGithubEndpoint(e){return(await this.endpointsApi.createGithubEndpoint(e)).data}async updateGithubEndpoint(e,r){return(await this.endpointsApi.updateGithubEndpoint(e,r)).data}async deleteGithubEndpoint(e){await this.endpointsApi.deleteGithubEndpoint(e)}async listGiteaEndpoints(){return(await 
this.endpointsApi.listGiteaEndpoints()).data||[]}async getGiteaEndpoint(e){return(await this.endpointsApi.getGiteaEndpoint(e)).data}async createGiteaEndpoint(e){return(await this.endpointsApi.createGiteaEndpoint(e)).data}async updateGiteaEndpoint(e,r){return(await this.endpointsApi.updateGiteaEndpoint(e,r)).data}async deleteGiteaEndpoint(e){await this.endpointsApi.deleteGiteaEndpoint(e)}async listAllEndpoints(){const[e,r]=await Promise.all([this.listGithubEndpoints().catch(()=>[]),this.listGiteaEndpoints().catch(()=>[])]);return[...e.map(s=>({...s,endpoint_type:"github"})),...r.map(s=>({...s,endpoint_type:"gitea"}))]}async listGithubCredentials(){return(await this.credentialsApi.listCredentials()).data||[]}async getGithubCredentials(e){return(await this.credentialsApi.getCredentials(e)).data}async createGithubCredentials(e){return(await this.credentialsApi.createCredentials(e)).data}async updateGithubCredentials(e,r){return(await this.credentialsApi.updateCredentials(e,r)).data}async deleteGithubCredentials(e){await this.credentialsApi.deleteCredentials(e)}async listGiteaCredentials(){return(await this.credentialsApi.listGiteaCredentials()).data||[]}async getGiteaCredentials(e){return(await this.credentialsApi.getGiteaCredentials(e)).data}async createGiteaCredentials(e){return(await this.credentialsApi.createGiteaCredentials(e)).data}async updateGiteaCredentials(e,r){return(await this.credentialsApi.updateGiteaCredentials(e,r)).data}async deleteGiteaCredentials(e){await this.credentialsApi.deleteGiteaCredentials(e)}async listAllCredentials(){const[e,r]=await Promise.all([this.listGithubCredentials().catch(()=>[]),this.listGiteaCredentials().catch(()=>[])]);return[...e,...r]}async installRepositoryWebhook(e,r={}){await this.repositoriesApi.installRepoWebhook(e,r)}async uninstallRepositoryWebhook(e){await this.hooksApi.uninstallRepoWebhook(e)}async getRepositoryWebhookInfo(e){return(await this.hooksApi.getRepoWebhookInfo(e)).data}async listRepositories(){return(await this.repositoriesApi.listRepos()).data||[]}async getRepository(e){return(await this.repositoriesApi.getRepo(e)).data}async createRepository(e){return(await this.repositoriesApi.createRepo(e)).data}async updateRepository(e,r){return(await this.repositoriesApi.updateRepo(e,r)).data}async deleteRepository(e){await this.repositoriesApi.deleteRepo(e)}async installRepoWebhook(e){await this.repositoriesApi.installRepoWebhook(e,{})}async listRepositoryPools(e){return(await this.repositoriesApi.listRepoPools(e)).data||[]}async listRepositoryInstances(e){return(await this.repositoriesApi.listRepoInstances(e)).data||[]}async createRepositoryPool(e,r){return(await this.repositoriesApi.createRepoPool(e,r)).data}async installOrganizationWebhook(e,r={}){await this.organizationsApi.installOrgWebhook(e,r)}async uninstallOrganizationWebhook(e){await this.hooksApi.uninstallOrgWebhook(e)}async getOrganizationWebhookInfo(e){return(await this.hooksApi.getOrgWebhookInfo(e)).data}async listOrganizations(){return(await this.organizationsApi.listOrgs()).data||[]}async getOrganization(e){return(await this.organizationsApi.getOrg(e)).data}async createOrganization(e){return(await this.organizationsApi.createOrg(e)).data}async updateOrganization(e,r){return(await this.organizationsApi.updateOrg(e,r)).data}async deleteOrganization(e){await this.organizationsApi.deleteOrg(e)}async listOrganizationPools(e){return(await this.organizationsApi.listOrgPools(e)).data||[]}async listOrganizationInstances(e){return(await 
-[… remainder of preceding minified chunk: GARM API client methods (repositories, organizations, enterprises, pools, scale sets, instances, providers, credentials, endpoints, first-run, controller) and a Button component; inline markup lost in extraction …]
diff --git a/webapp/assets/_app/immutable/chunks/DsnmJJEf.js b/webapp/assets/_app/immutable/chunks/DsnmJJEf.js
deleted file mode 100644
index ca27dc73..00000000
--- a/webapp/assets/_app/immutable/chunks/DsnmJJEf.js
+++ /dev/null
@@ -1 +0,0 @@
-typeof window<"u"&&((window.__svelte??={}).v??=new Set).add("5");
diff --git a/webapp/assets/_app/immutable/chunks/XzGG0o_q.js b/webapp/assets/_app/immutable/chunks/XzGG0o_q.js
deleted file mode 100644
index 2fbea6ec..00000000
--- a/webapp/assets/_app/immutable/chunks/XzGG0o_q.js
+++ /dev/null
@@ -1 +0,0 @@
-[minified Svelte chunk: entity "Update" modal (credentials selection, pool balancer round-robin/pack, optional webhook secret); template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/_gFYyMUN.js b/webapp/assets/_app/immutable/chunks/_gFYyMUN.js
deleted file mode 100644
index a6eb6026..00000000
--- a/webapp/assets/_app/immutable/chunks/_gFYyMUN.js
+++ /dev/null
@@ -1 +0,0 @@
-import{F as t,G as S,u as b,H as h,S as k}from"./sWNKMed7.js";function u(r,i){return r===i||r?.[k]===i}function d(r={},i,a,T){return t(()=>{var f,s;return S(()=>{f=s,s=[],b(()=>{r!==a(...s)&&(i(r,...s),f&&u(a(...f),r)&&i(null,...f))})}),()=>{h(()=>{s&&u(a(...s),r)&&i(null,...s)})}}),r}export{d as b};
diff --git a/webapp/assets/_app/immutable/chunks/cjRLNre3.js b/webapp/assets/_app/immutable/chunks/cjRLNre3.js
deleted file mode 100644
index 5c365e39..00000000
--- a/webapp/assets/_app/immutable/chunks/cjRLNre3.js
+++ /dev/null
@@ -1,4 +0,0 @@
-[minified Svelte chunk: date formatting, forge icons, entity name/type/URL resolution, search filtering, pagination, status badges, and a Badge component; forge-icon SVG templates lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/kJjQhR0J.js b/webapp/assets/_app/immutable/chunks/kJjQhR0J.js
deleted file mode 100644
index 5122e995..00000000
--- a/webapp/assets/_app/immutable/chunks/kJjQhR0J.js
+++ /dev/null
@@ -1 +0,0 @@
-[minified Svelte chunk: delete-confirmation modal (title, message, item name, loading state, Cancel/Delete buttons); template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/ow_oMtSd.js b/webapp/assets/_app/immutable/chunks/ow_oMtSd.js
deleted file mode 100644
index a2bd2eaf..00000000
--- a/webapp/assets/_app/immutable/chunks/ow_oMtSd.js
+++ /dev/null
@@ -1 +0,0 @@
-function a(e){return e?e.replace(/_/g," ").toLowerCase().split(" ").map(r=>r.charAt(0).toUpperCase()+r.slice(1)).join(" "):""}function g(e){if(!e)return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20";switch(e.toLowerCase()){case"running":case"online":return"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-500/10 dark:text-green-400 dark:ring-green-500/20";case"idle":case"stopped":return"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-500/10 dark:text-blue-400 dark:ring-blue-500/20";case"active":return"bg-yellow-50 text-yellow-700 ring-yellow-600/20 dark:bg-yellow-500/10 dark:text-yellow-400 dark:ring-yellow-500/20";case"creating":case"installing":case"pending_create":case"provisioning":return"bg-purple-50 text-purple-700 ring-purple-600/20 dark:bg-purple-500/10 dark:text-purple-400 dark:ring-purple-500/20 animate-pulse";case"deleting":case"terminating":case"pending_delete":case"destroying":return"bg-orange-50 text-orange-700 ring-orange-600/20 dark:bg-orange-500/10 dark:text-orange-400 dark:ring-orange-500/20 animate-pulse";case"failed":case"error":case"terminated":case"offline":return"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-500/10 dark:text-red-400 dark:ring-red-500/20";case"pending":case"unknown":return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20 animate-pulse";default:return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20"}}export{a as f,g};
diff --git a/webapp/assets/_app/immutable/chunks/qfys27k5.js b/webapp/assets/_app/immutable/chunks/qfys27k5.js
deleted file mode 100644
index 9b88c7b1..00000000
--- a/webapp/assets/_app/immutable/chunks/qfys27k5.js
+++ /dev/null
@@ -1 +0,0 @@
-[minified Svelte chunk: empty-state panel with title, description, and optional action button; template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/sWNKMed7.js b/webapp/assets/_app/immutable/chunks/sWNKMed7.js
deleted file mode 100644
index 545a4b36..00000000
--- a/webapp/assets/_app/immutable/chunks/sWNKMed7.js
+++ /dev/null
@@ -1,2 +0,0 @@
-[minified Svelte 5 client runtime chunk (~30 KB): signals/reactivity, effect scheduler, proxies, DOM templating and hydration, event delegation, stores, and lifecycle helpers (onMount, onDestroy, createEventDispatcher, tick); body too large and line-wrapped to reproduce here]
diff --git a/webapp/assets/_app/immutable/chunks/vrFkfzoI.js b/webapp/assets/_app/immutable/chunks/vrFkfzoI.js
deleted file mode 100644
index 642f92ae..00000000
--- a/webapp/assets/_app/immutable/chunks/vrFkfzoI.js
+++ /dev/null
@@ -1 +0,0 @@
-[minified Svelte chunk: "Update Scale Set" modal (provider/entity info, image and OS configuration, runner limits and timing, advanced settings, extra specs JSON); template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/z0u7Z3zm.js b/webapp/assets/_app/immutable/chunks/z0u7Z3zm.js
deleted file mode 100644
index e3e95e0c..00000000
--- a/webapp/assets/_app/immutable/chunks/z0u7Z3zm.js
+++ /dev/null
@@ -1 +0,0 @@
-[minified Svelte chunk: forge-type selector offering GitHub and Gitea; template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/chunks/zNh6Oe5P.js b/webapp/assets/_app/immutable/chunks/zNh6Oe5P.js
deleted file mode 100644
index 20c6cf21..00000000
--- a/webapp/assets/_app/immutable/chunks/zNh6Oe5P.js
+++ /dev/null
@@ -1 +0,0 @@
-import{am as g,an as d,ao as c,u as m,ap as b,aq as i,g as p,n as v,ar as h,as as k}from"./sWNKMed7.js";function x(n=!1){const s=g,e=s.l.u;if(!e)return;let f=()=>v(s.s);if(n){let t=0,a={};const _=h(()=>{let l=!1;const r=s.s;for(const o in r)r[o]!==a[o]&&(a[o]=r[o],l=!0);return l&&t++,t});f=()=>p(_)}e.b.length&&d(()=>{u(s,f),i(e.b)}),c(()=>{const t=m(()=>e.m.map(b));return()=>{for(const a of t)typeof a=="function"&&a()}}),e.a.length&&c(()=>{u(s,f),i(e.a)})}function u(n,s){if(n.l.s)for(const e of n.l.s)p(e);s()}k();export{x as i};
diff --git a/webapp/assets/_app/immutable/entry/app.jX_3SVcZ.js b/webapp/assets/_app/immutable/entry/app.jX_3SVcZ.js
deleted file mode 100644
index 218f2adf..00000000
--- a/webapp/assets/_app/immutable/entry/app.jX_3SVcZ.js
+++ /dev/null
@@ -1,2 +0,0 @@
-[minified SvelteKit app entry: __vite__mapDeps module-preload map, legacy component proxy, root layout renderer, and the route dictionary ("/", "/credentials", "/endpoints", "/enterprises", "/enterprises/[id]", "/init", "/instances", "/instances/[id]", "/login", "/organizations", "/organizations/[id]", "/pools", "/pools/[id]", "/repositories", "/repositories/[id]", "/scalesets", "/scalesets/[id]"); template markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/entry/start.AxRSx0k5.js b/webapp/assets/_app/immutable/entry/start.AxRSx0k5.js
deleted file mode 100644
index 8af5e582..00000000
--- a/webapp/assets/_app/immutable/entry/start.AxRSx0k5.js
+++ /dev/null
@@ -1 +0,0 @@
-import{l as o,a as r}from"../chunks/CPCsbdkz.js";export{o as load_css,r as start};
diff --git a/webapp/assets/_app/immutable/nodes/0.CCd664k7.js b/webapp/assets/_app/immutable/nodes/0.CCd664k7.js
deleted file mode 100644
index e9b29991..00000000
--- a/webapp/assets/_app/immutable/nodes/0.CCd664k7.js
+++ /dev/null
@@ -1,13 +0,0 @@
-[minified root-layout node: sidebar navigation (Dashboard, Repositories, Organizations, Enterprises, Pools, Scale Sets, Runners, plus Credentials and Endpoints), websocket live-update indicator, dark-mode toggle, toast notifications, and the auth guard redirecting to /login or /init; markup lost in extraction]
diff --git a/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js b/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js
deleted file mode 100644
index eae5dc5a..00000000
--- a/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js
+++ /dev/null
@@ -1 +0,0 @@
-import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/zNh6Oe5P.js";import{p as h,f as g,b as v,t as d,c as l,d as _,j as s,r as a,k as x,v as o}from"../chunks/sWNKMed7.js";import{s as k,p}from"../chunks/CPCsbdkz.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const i=$;var b=g("
                '),Zr=g(" ",1);function ca(Z,ee){Be(ee,!1);const[re,he]=Ve(),j=()=>xe(Ne,"$page",re),m=()=>xe(xr,"$authStore",re),u=F(),y=F(),z=F();qe(()=>{Ue.init(),setTimeout(()=>{const p=j().url.pathname===n("/login"),H=j().url.pathname===n("/init");!p&&!H&&!m().isAuthenticated&&!m().loading&&(m().needsInitialization?me(n("/init")):me(n("/login")))},200)}),X(()=>(m(),j(),me),()=>{if(!m().loading){const p=j().url.pathname===n("/login"),H=j().url.pathname===n("/init");!p&&!H&&!m().isAuthenticated&&(m().needsInitialization?me(n("/init")):me(n("/login")))}}),X(()=>(j(),n),()=>{h(u,j().url.pathname===n("/login"))}),X(()=>(j(),n),()=>{h(y,j().url.pathname===n("/init"))}),X(()=>(e(u),e(y)),()=>{h(z,!e(u)&&!e(y))}),Ae(),Le();var G=Zr();hr(p=>{ur.title="GARM - GitHub Actions Runner Manager"});var f=E(G);{var C=p=>{var H=Wr();i(p,H)},v=p=>{var H=U(),le=E(H);{var J=I=>{var q=Xr();i(I,q)},D=I=>{var q=U(),ae=E(q);{var K=b=>{var S=U(),N=E(S);Oe(N,ee,"default",{}),i(b,S)},te=b=>{var S=Yr(),N=s(S);Ur(N,{});var oe=l(N,2),de=s(oe),pe=s(de),be=s(pe);Oe(be,ee,"default",{}),t(pe),t(de),t(oe),t(S),i(b,S)};M(ae,b=>{e(u)||e(y)?b(K):b(te,!1)},!0)}i(I,q)};M(le,I=>{e(z),m(),o(()=>e(z)&&!m().isAuthenticated)?I(J):I(D,!1)},!0)}i(p,H)};M(f,p=>{m(),o(()=>m().loading)?p(C):p(v,!1)})}var O=l(f,2);Kr(O,{}),i(Z,G),$e(),he()}export{ca as component,va as universal}; diff --git a/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js b/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js deleted file mode 100644 index eae5dc5a..00000000 --- a/webapp/assets/_app/immutable/nodes/1.CyFJoHvz.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/zNh6Oe5P.js";import{p as h,f as g,b as v,t as d,c as l,d as _,j as s,r as a,k as x,v as o}from"../chunks/sWNKMed7.js";import{s as k,p}from"../chunks/CPCsbdkz.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const i=$;var b=g("

                ",1);function y(m,c){h(c,!1),u();var r=b(),t=v(r),n=s(t,!0);a(t);var e=x(t,2),f=s(e,!0);a(e),d(()=>{o(n,i.status),o(f,i.error?.message)}),l(m,r),_()}export{y as component}; diff --git a/webapp/assets/_app/immutable/nodes/10.BcDuisC2.js b/webapp/assets/_app/immutable/nodes/10.BcDuisC2.js deleted file mode 100644 index ad7ee403..00000000 --- a/webapp/assets/_app/immutable/nodes/10.BcDuisC2.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Z}from"../chunks/zNh6Oe5P.js";import{p as ee,o as ae,l as re,a as te,f as K,h as se,t as _,g as a,e as k,c as w,d as de,$ as oe,k as d,D as ie,m as f,j as r,u as B,n as D,s as i,r as t,z as q,v as I}from"../chunks/sWNKMed7.js";import{i as le,s as ne,a as ce}from"../chunks/Ccl3fNd2.js";import{B as me,d as l,c as T,r as U}from"../chunks/DVl4ZBgx.js";import{b as C}from"../chunks/CLagxtgo.js";import{p as ue}from"../chunks/D4Caz1gY.js";import{g as H}from"../chunks/CPCsbdkz.js";import{a as pe,b as ve}from"../chunks/CVQRp8zk.js";import{e as fe}from"../chunks/BZiHL9L3.js";var ge=K('

                '),he=K('
                GARM

                Sign in to GARM

                GitHub Actions Runner Manager

                ');function Ae(W,F){ee(F,!1);const[J,N]=ne(),$=()=>ce(pe,"$authStore",J);let m=f(""),u=f(""),o=f(!1),n=f("");ae(()=>{O()});function O(){const e=localStorage.getItem("theme");let s=!1;e==="dark"?s=!0:e==="light"?s=!1:s=window.matchMedia("(prefers-color-scheme: dark)").matches,s?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}async function M(){if(!a(m)||!a(u)){i(n,"Please enter both username and password");return}i(o,!0),i(n,"");try{await ve.login(a(m),a(u)),H(l("/"))}catch(e){i(n,fe(e))}finally{i(o,!1)}}function L(e){e.key==="Enter"&&M()}re(()=>($(),l),()=>{$().isAuthenticated&&H(l("/"))}),te(),Z();var g=he();se(e=>{oe.title="Login - GARM"});var z=r(g),h=r(z),A=r(h),S=r(A),Q=d(S,2);t(A),q(4),t(h);var b=d(h,2),x=r(b),y=r(x),p=d(r(y),2);U(p),t(y);var P=d(y,2),v=d(r(P),2);U(v),t(P),t(x);var G=d(x,2);{var V=e=>{var s=ge(),c=r(s),E=d(r(c),2),j=r(E),Y=r(j,!0);t(j),t(E),t(c),t(s),_(()=>I(Y,a(n))),w(e,s)};le(G,e=>{a(n)&&e(V)})}var R=d(G,2),X=r(R);me(X,{type:"submit",variant:"primary",size:"md",fullWidth:!0,get disabled(){return a(o)},get loading(){return a(o)},children:(e,s)=>{q();var c=ie();_(()=>I(c,a(o)?"Signing in...":"Sign in")),w(e,c)},$$slots:{default:!0}}),t(R),t(b),t(z),t(g),_((e,s)=>{T(S,"src",e),T(Q,"src",s),p.disabled=a(o),v.disabled=a(o)},[()=>(D(l),B(()=>l("/assets/garm-light.svg"))),()=>(D(l),B(()=>l("/assets/garm-dark.svg")))]),C(p,()=>a(m),e=>i(m,e)),k("keypress",p,L),C(v,()=>a(u),e=>i(u,e)),k("keypress",v,L),k("submit",b,ue(M)),w(W,g),de(),N()}export{Ae as component}; diff --git a/webapp/assets/_app/immutable/nodes/11.BtC3ypSa.js b/webapp/assets/_app/immutable/nodes/11.BtC3ypSa.js deleted file mode 100644 index 84ce514f..00000000 --- a/webapp/assets/_app/immutable/nodes/11.BtC3ypSa.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Le}from"../chunks/zNh6Oe5P.js";import{p as Ne,E as Qe,o as je,l as x,s as a,m as s,g as e,y as W,a as qe,f as T,k as i,j as o,r as n,c as E,t as Q,v as ie,x as Ae,u as v,z as Fe,e as $e,d as Ge,h as Xe,b as Ze,$ as et,n as P,q as ue}from"../chunks/sWNKMed7.js";import{a as He,i as X,s as Je}from"../chunks/Ccl3fNd2.js";import{r as ge,b as Ue,h as tt,d as Ie,c as at,g as me}from"../chunks/DVl4ZBgx.js";import{e as rt,i as ot}from"../chunks/BuuPrWMc.js";import{b as Re,a as We}from"../chunks/CLagxtgo.js";import{p as nt}from"../chunks/D4Caz1gY.js";import{M as it}from"../chunks/DN14Fk2Y.js";import{F as st}from"../chunks/z0u7Z3zm.js";import{e as Pe}from"../chunks/BZiHL9L3.js";import{e as Ve,a as Me}from"../chunks/DA-798Ko.js";import{U as lt}from"../chunks/XzGG0o_q.js";import{D as dt}from"../chunks/kJjQhR0J.js";import{P as ct}from"../chunks/qfys27k5.js";import{t as K}from"../chunks/BZUCTtPY.js";import{B as ut,k as Ce,g as Oe,l as gt}from"../chunks/cjRLNre3.js";import{D as mt,A as Be,G as pt,a as ft}from"../chunks/DCYYzf48.js";import{E as bt}from"../chunks/DAWfW-VQ.js";import{E as vt}from"../chunks/CARsAFuo.js";import{S as yt}from"../chunks/BJXodF8n.js";var ht=T('

                '),_t=T('

                Loading...

                '),xt=T(""),kt=T(''),wt=T('

                Webhook secret will be automatically generated

                '),zt=T('
                '),$t=T('

                Create Organization

                ');function Ct(pe,fe){Ne(fe,!1);const[be,ve]=Je(),f=()=>He(Ve,"$eagerCache",be),D=s(),w=s(),z=s(),Z=s(),$=Qe();let C=s(!1),b=s(""),y=s("github"),r=s({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),k=s(!0),p=s(!0);async function A(){if(!f().loaded.credentials&&!f().loading.credentials)try{await Me.getCredentials()}catch(d){a(b,Pe(d))}}function B(d){a(y,d.detail),W(r,e(r).credentials_name="")}function c(){if(e(r).credentials_name){const d=e(D).find(L=>L.name===e(r).credentials_name);d&&d.forge_type&&a(y,d.forge_type)}}function ye(){const d=new Uint8Array(32);return crypto.getRandomValues(d),Array.from(d,L=>L.toString(16).padStart(2,"0")).join("")}async function he(){if(!e(r).name?.trim()){a(b,"Organization name is required");return}if(!e(r).credentials_name){a(b,"Please select credentials");return}try{a(C,!0),a(b,"");const d={...e(r),install_webhook:e(k),auto_generate_secret:e(p)};$("submit",d)}catch(d){a(b,d instanceof Error?d.message:"Failed to create organization"),a(C,!1)}}je(()=>{A()}),x(()=>f(),()=>{a(D,f().credentials)}),x(()=>f(),()=>{a(w,f().loading.credentials)}),x(()=>(e(D),e(y)),()=>{a(z,e(D).filter(d=>e(y)?d.forge_type===e(y):!0))}),x(()=>e(p),()=>{e(p)?W(r,e(r).webhook_secret=ye()):e(p)||W(r,e(r).webhook_secret="")}),x(()=>(e(r),e(p)),()=>{a(Z,e(r).name?.trim()!==""&&e(r).credentials_name!==""&&(e(p)||e(r).webhook_secret&&e(r).webhook_secret.trim()!==""))}),qe(),Le(),it(pe,{$$events:{close:()=>$("close")},children:(d,L)=>{var ee=$t(),N=i(o(ee),2);{var te=h=>{var _=ht(),F=o(_),U=o(F,!0);n(F),n(_),Q(()=>ie(U,e(b))),E(h,_)};X(N,h=>{e(b)&&h(te)})}var _e=i(N,2);{var xe=h=>{var _=_t();E(h,_)},ke=h=>{var _=zt(),F=o(_);st(F,{get selectedForgeType(){return e(y)},set selectedForgeType(l){a(y,l)},$$events:{select:B},$$legacy:!0});var U=i(F,2),se=i(o(U),2);ge(se),n(U);var j=i(U,2),S=i(o(j),2);Q(()=>{e(r),Ae(()=>{e(z)})});var q=o(S);q.value=q.__value="";var we=i(q);rt(we,1,()=>e(z),ot,(l,m)=>{var O=xt(),de=o(O);n(O);var ce={};Q(()=>{ie(de,`${e(m),v(()=>e(m).name)??""} (${e(m),v(()=>e(m).endpoint?.name||"Unknown endpoint")??""})`),ce!==(ce=(e(m),v(()=>e(m).name)))&&(O.value=(O.__value=(e(m),v(()=>e(m).name)))??"")}),E(l,O)}),n(S),n(j);var G=i(j,2),ae=i(o(G),2);Q(()=>{e(r),Ae(()=>{})});var H=o(ae);H.value=H.__value="roundrobin";var le=i(H);le.value=le.__value="pack",n(ae),n(G);var re=i(G,2),oe=o(re),t=o(oe);ge(t),Fe(2),n(oe);var u=i(oe,2),I=o(u),M=o(I);ge(M),Fe(2),n(I);var g=i(I,2);{var J=l=>{var m=kt();ge(m),Re(m,()=>e(r).webhook_secret,O=>W(r,e(r).webhook_secret=O)),E(l,m)},ne=l=>{var m=wt();E(l,m)};X(g,l=>{e(p)?l(ne,!1):l(J)})}n(u),n(re);var V=i(re,2),Y=o(V),R=i(Y,2),ze=o(R,!0);n(R),n(V),n(_),Q(()=>{R.disabled=e(C)||e(w)||!e(Z),ie(ze,e(C)?"Creating...":"Create Organization")}),Re(se,()=>e(r).name,l=>W(r,e(r).name=l)),Ue(S,()=>e(r).credentials_name,l=>W(r,e(r).credentials_name=l)),$e("change",S,c),Ue(ae,()=>e(r).pool_balancer_type,l=>W(r,e(r).pool_balancer_type=l)),We(t,()=>e(k),l=>a(k,l)),We(M,()=>e(p),l=>a(p,l)),$e("click",Y,()=>$("close")),$e("submit",_,nt(he)),E(h,_)};X(_e,h=>{e(C)?h(xe):h(ke,!1)})}n(ee),E(d,ee)},$$slots:{default:!0}}),Ge(),ve()}var Ot=T(''),Pt=T('
                ',1);function Kt(pe,fe){Ne(fe,!1);const[be,ve]=Je(),f=()=>He(Ve,"$eagerCache",be),D=s(),w=s(),z=s(),Z=s();let $=s([]),C=s(!0),b=s(""),y=s(""),r=s(1),k=s(25),p=s(!1),A=s(!1),B=s(!1),c=s(null);function ye(){a(p,!1),a(B,!1),a(A,!1)}async function he(t){try{a(b,"");const u=t.detail,I={name:u.name,credentials_name:u.credentials_name,webhook_secret:u.webhook_secret,pool_balancer_type:u.pool_balancer_type},M=await me.createOrganization(I);if(u.install_webhook&&M.id)try{await me.installOrganizationWebhook(M.id),K.success("Webhook Installed",`Webhook for organization ${M.name} has been installed successfully.`)}catch(g){console.warn("Organization created but webhook installation failed:",g),K.error("Webhook Installation Failed",g instanceof Error?g.message:"Failed to install webhook. You can try installing it manually from the organization details page.")}K.success("Organization Created",`Organization ${M.name} has been created successfully.`),a(p,!1)}catch(u){throw a(b,Pe(u)),u}}async function d(t){if(e(c))try{await me.updateOrganization(e(c).id,t),K.success("Organization Updated",`Organization ${e(c).name} has been updated successfully.`),a(A,!1),a(c,null)}catch(u){throw u}}async function L(){if(e(c))try{a(b,""),await me.deleteOrganization(e(c).id),K.success("Organization Deleted",`Organization ${e(c).name} has been deleted successfully.`),a(c,null)}catch(t){const u=Pe(t);K.error("Delete Failed",u)}finally{ye()}}function ee(){a(p,!0)}function N(t){a(c,t),a(A,!0)}function te(t){a(c,t),a(B,!0)}je(async()=>{try{a(C,!0);const t=await Me.getOrganizations();t&&Array.isArray(t)&&a($,t)}catch(t){console.error("Failed to load organizations:",t),a(b,t instanceof Error?t.message:"Failed to load organizations")}finally{a(C,!1)}});async function _e(){try{await Me.retryResource("organizations")}catch(t){console.error("Retry failed:",t)}}const xe=[{key:"name",title:"Name",cellComponent:bt,cellProps:{entityType:"organization"}},{key:"endpoint",title:"Endpoint",cellComponent:vt},{key:"credentials",title:"Credentials",cellComponent:pt,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:yt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:ft}],ke={entityType:"organization",primaryText:{field:"name",isClickable:!0,href:"/organizations/{id}"},customInfo:[{icon:t=>Oe(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ce(t)}],actions:[{type:"edit",handler:t=>N(t)},{type:"delete",handler:t=>te(t)}]};function h(t){a(y,t.detail.term),a(r,1)}function _(t){a(r,t.detail.page)}function F(t){a(k,t.detail.perPage),a(r,1)}function U(t){N(t.detail.item)}function se(t){te(t.detail.item)}x(()=>(e($),f()),()=>{(!e($).length||f().loaded.organizations)&&a($,f().organizations)}),x(()=>f(),()=>{a(C,f().loading.organizations)}),x(()=>f(),()=>{a(D,f().errorMessages.organizations)}),x(()=>(e($),e(y)),()=>{a(w,gt(e($),e(y)))}),x(()=>(e(w),e(k)),()=>{a(z,Math.ceil(e(w).length/e(k)))}),x(()=>(e(r),e(z)),()=>{e(r)>e(z)&&e(z)>0&&a(r,e(z))}),x(()=>(e(w),e(r),e(k)),()=>{a(Z,e(w).slice((e(r)-1)*e(k),e(r)*e(k)))}),qe(),Le();var j=Pt();Xe(t=>{et.title="Organizations - GARM"});var S=Ze(j),q=o(S);ct(q,{title:"Organizations",description:"Manage GitHub and Gitea organizations",actionLabel:"Add Organization",$$events:{action:ee}});var we=i(q,2);{let t=ue(()=>e(D)||e(b)),u=ue(()=>!!e(D));mt(we,{get columns(){return xe},get data(){return e(Z)},get loading(){return e(C)},get error(){return e(t)},get 
searchTerm(){return e(y)},searchPlaceholder:"Search organizations...",get currentPage(){return e(r)},get perPage(){return e(k)},get totalPages(){return e(z)},get totalItems(){return e(w),v(()=>e(w).length)},itemName:"organizations",emptyIconType:"building",get showRetry(){return e(u)},get mobileCardConfig(){return ke},$$events:{search:h,pageChange:_,perPageChange:F,retry:_e,edit:U,delete:se},$$slots:{"mobile-card":(I,M)=>{const g=ue(()=>M.item),J=ue(()=>(P(Ce),P(e(g)),v(()=>Ce(e(g)))));var ne=Ot(),V=o(ne),Y=o(V),R=o(Y),ze=o(R,!0);n(R);var l=i(R,2),m=o(l),O=o(m);tt(O,()=>(P(Oe),P(e(g)),v(()=>Oe(e(g).endpoint?.endpoint_type||"unknown"))));var de=i(O,2),ce=o(de,!0);n(de),n(m),n(l),n(Y),n(V);var Ee=i(V,2),Te=o(Ee);ut(Te,{get variant(){return P(e(J)),v(()=>e(J).variant)},get text(){return P(e(J)),v(()=>e(J).text)}});var De=i(Te,2),Se=o(De);Be(Se,{action:"edit",size:"sm",title:"Edit organization",ariaLabel:"Edit organization",$$events:{click:()=>N(e(g))}});var Ye=i(Se,2);Be(Ye,{action:"delete",size:"sm",title:"Delete organization",ariaLabel:"Delete organization",$$events:{click:()=>te(e(g))}}),n(De),n(Ee),n(ne),Q(Ke=>{at(Y,"href",Ke),ie(ze,(P(e(g)),v(()=>e(g).name))),ie(ce,(P(e(g)),v(()=>e(g).endpoint?.name||"Unknown")))},[()=>(P(Ie),P(e(g)),v(()=>Ie(`/organizations/${e(g).id}`)))]),E(I,ne)}}})}n(S);var G=i(S,2);{var ae=t=>{Ct(t,{$$events:{close:()=>a(p,!1),submit:he}})};X(G,t=>{e(p)&&t(ae)})}var H=i(G,2);{var le=t=>{lt(t,{get entity(){return e(c)},entityType:"organization",$$events:{close:()=>{a(A,!1),a(c,null)},submit:u=>d(u.detail)}})};X(H,t=>{e(A)&&e(c)&&t(le)})}var re=i(H,2);{var oe=t=>{dt(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone.",get itemName(){return e(c),v(()=>e(c).name)},$$events:{close:()=>{a(B,!1),a(c,null)},confirm:L}})};X(re,t=>{e(B)&&e(c)&&t(oe)})}E(pe,j),Ge(),ve()}export{Kt as component}; diff --git a/webapp/assets/_app/immutable/nodes/12.PNl__Wik.js b/webapp/assets/_app/immutable/nodes/12.PNl__Wik.js deleted file mode 100644 index c0fb1a01..00000000 --- a/webapp/assets/_app/immutable/nodes/12.PNl__Wik.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as We}from"../chunks/zNh6Oe5P.js";import{p as qe,o as He,A as je,l as Ge,a as Re,f as B,h as Ve,b as C,t as j,c as x,d as Je,u as i,n as de,v as ce,g as e,m as l,j as f,k as d,s as o,$ as Ke,r as g,y as Qe,B as ue,q as m}from"../chunks/sWNKMed7.js";import{i as h,s as Xe,a as Ye}from"../chunks/Ccl3fNd2.js";import{d as A,c as Ze,g as _}from"../chunks/DVl4ZBgx.js";import{p as et}from"../chunks/CJwphPxi.js";import{g as fe}from"../chunks/CPCsbdkz.js";import{U as tt}from"../chunks/XzGG0o_q.js";import{D as ge}from"../chunks/kJjQhR0J.js";import{E as at,P as nt,a as ot}from"../chunks/CaVdfWt-.js";import{D as rt,I as it}from"../chunks/0ZGtv6cq.js";import{g as me}from"../chunks/cjRLNre3.js";import{e as S}from"../chunks/BZiHL9L3.js";import{W as st}from"../chunks/Clig3Vwb.js";import{C as lt}from"../chunks/BRFhz4VJ.js";import{w as G}from"../chunks/BuuPrWMc.js";import{t as I}from"../chunks/BZUCTtPY.js";var dt=B('

                Loading organization...

                '),ct=B('

                '),ut=B(" ",1),ft=B(' ',1);function Tt(pe,ve){qe(ve,!1);const[ye,he]=Xe(),R=()=>Ye(et,"$page",ye),w=l();let a=l(null),c=l([]),p=l([]),F=l(!0),O=l(""),D=l(!1),P=l(!1),E=l(!1),T=l(!1),u=l(null),M=null,b=l();async function V(){if(e(w))try{o(F,!0),o(O,"");const[t,n,r]=await Promise.all([_.getOrganization(e(w)),_.listOrganizationPools(e(w)).catch(()=>[]),_.listOrganizationInstances(e(w)).catch(()=>[])]);o(a,t),o(c,n),o(p,r)}catch(t){o(O,S(t))}finally{o(F,!1)}}function _e(t,n){const{events:r}=t;return{...n,events:r}}async function be(t){if(e(a))try{await _.updateOrganization(e(a).id,t),await V(),I.success("Organization Updated",`Organization ${e(a).name} has been updated successfully.`),o(D,!1)}catch(n){throw n}}async function ze(){if(e(a)){try{await _.deleteOrganization(e(a).id),fe(A("/organizations"))}catch(t){const n=S(t);I.error("Delete Failed",n)}o(P,!1)}}async function $e(){if(e(u))try{await _.deleteInstance(e(u).name),I.success("Instance Deleted",`Instance ${e(u).name} has been deleted successfully.`),o(E,!1),o(u,null)}catch(t){const n=S(t);I.error("Delete Failed",n),o(E,!1),o(u,null)}}function xe(t){o(u,t),o(E,!0)}function Ie(){o(T,!0)}async function we(t){try{if(!e(a))return;await _.createOrganizationPool(e(a).id,t.detail),I.success("Pool Created",`Pool has been created successfully for organization ${e(a).name}.`),o(T,!1)}catch(n){const r=S(n);I.error("Pool Creation Failed",r)}}function J(){e(b)&&Qe(b,e(b).scrollTop=e(b).scrollHeight)}function Ee(t){if(t.operation==="update"){const n=t.payload;if(e(a)&&n.id===e(a).id){const r=e(a).events?.length||0,s=n.events?.length||0;o(a,_e(e(a),n)),s>r&&setTimeout(()=>{J()},100)}}else if(t.operation==="delete"){const n=t.payload.id||t.payload;e(a)&&e(a).id===n&&fe(A("/organizations"))}}function Oe(t){if(!e(a))return;const n=t.payload;if(n.org_id===e(a).id){if(t.operation==="create")o(c,[...e(c),n]);else if(t.operation==="update")o(c,e(c).map(r=>r.id===n.id?n:r));else if(t.operation==="delete"){const r=n.id||n;o(c,e(c).filter(s=>s.id!==r))}}}function De(t){if(!e(a)||!e(c))return;const n=t.payload;if(e(c).some(s=>s.id===n.pool_id)){if(t.operation==="create")o(p,[...e(p),n]);else if(t.operation==="update")o(p,e(p).map(s=>s.id===n.id?n:s));else if(t.operation==="delete"){const s=n.id||n;o(p,e(p).filter(W=>W.id!==s))}}}He(()=>{V().then(()=>{e(a)?.events?.length&&setTimeout(()=>{J()},100)});const t=G.subscribeToEntity("organization",["update","delete"],Ee),n=G.subscribeToEntity("pool",["create","update","delete"],Oe),r=G.subscribeToEntity("instance",["create","update","delete"],De);M=()=>{t(),n(),r()}}),je(()=>{M&&(M(),M=null)}),Ge(()=>R(),()=>{o(w,R().params.id)}),Re(),We();var K=ft();Ve(t=>{j(()=>Ke.title=`${e(a),i(()=>e(a)?`${e(a).name} - Organization Details`:"Organization Details")??""} - GARM`)});var U=C(K),L=f(U),Q=f(L),N=f(Q),Pe=f(N);g(N);var X=d(N,2),Y=f(X),Z=d(f(Y),2),Te=f(Z,!0);g(Z),g(Y),g(X),g(Q),g(L);var Me=d(L,2);{var ke=t=>{var n=dt();x(t,n)},Ce=t=>{var n=ue(),r=C(n);{var s=z=>{var $=ct(),k=f($),q=f(k,!0);g(k),g($),j(()=>ce(q,e(O))),x(z,$)},W=z=>{var $=ue(),k=C($);{var q=H=>{var ne=ut(),oe=C(ne);{let v=m(()=>(e(a),i(()=>e(a).name||"Organization"))),y=m(()=>(e(a),i(()=>e(a).endpoint?.name))),Ne=m(()=>(de(me),e(a),i(()=>me(e(a).endpoint?.endpoint_type||"unknown"))));rt(oe,{get title(){return e(v)},get subtitle(){return`Endpoint: ${e(y)??""}`},get forgeIcon(){return e(Ne)},onEdit:()=>o(D,!0),onDelete:()=>o(P,!0)})}var re=d(oe,2);at(re,{get entity(){return e(a)},entityType:"organization"});var ie=d(re,2);{let 
v=m(()=>(e(a),i(()=>e(a).id||""))),y=m(()=>(e(a),i(()=>e(a).name||"")));st(ie,{entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)}})}var se=d(ie,2);{let v=m(()=>(e(a),i(()=>e(a).id||""))),y=m(()=>(e(a),i(()=>e(a).name||"")));nt(se,{get pools(){return e(c)},entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)},$$events:{addPool:Ie}})}var le=d(se,2);it(le,{get instances(){return e(p)},entityType:"organization",onDeleteInstance:xe});var Le=d(le,2);{let v=m(()=>(e(a),i(()=>e(a)?.events)));ot(Le,{get events(){return e(v)},get eventsContainer(){return e(b)},set eventsContainer(y){o(b,y)},$$legacy:!0})}x(H,ne)};h(k,H=>{e(a)&&H(q)},!0)}x(z,$)};h(r,z=>{e(O)?z(s):z(W,!1)},!0)}x(t,n)};h(Me,t=>{e(F)?t(ke):t(Ce,!1)})}g(U);var ee=d(U,2);{var Ae=t=>{tt(t,{get entity(){return e(a)},entityType:"organization",$$events:{close:()=>o(D,!1),submit:n=>be(n.detail)}})};h(ee,t=>{e(D)&&e(a)&&t(Ae)})}var te=d(ee,2);{var Se=t=>{ge(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),i(()=>e(a).name)},$$events:{close:()=>o(P,!1),confirm:ze}})};h(te,t=>{e(P)&&e(a)&&t(Se)})}var ae=d(te,2);{var Be=t=>{ge(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(u),i(()=>e(u).name)},$$events:{close:()=>{o(E,!1),o(u,null)},confirm:$e}})};h(ae,t=>{e(E)&&e(u)&&t(Be)})}var Fe=d(ae,2);{var Ue=t=>{{let n=m(()=>(e(a),i(()=>e(a).id||"")));lt(t,{initialEntityType:"organization",get initialEntityId(){return e(n)},$$events:{close:()=>o(T,!1),submit:we}})}};h(Fe,t=>{e(T)&&e(a)&&t(Ue)})}j(t=>{Ze(Pe,"href",t),ce(Te,(e(a),i(()=>e(a)?e(a).name:"Loading...")))},[()=>(de(A),i(()=>A("/organizations")))]),x(pe,K),Je(),he()}export{Tt as component}; diff --git a/webapp/assets/_app/immutable/nodes/13.kIWXAcC-.js b/webapp/assets/_app/immutable/nodes/13.kIWXAcC-.js deleted file mode 100644 index b8a87c6e..00000000 --- a/webapp/assets/_app/immutable/nodes/13.kIWXAcC-.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as ne}from"../chunks/zNh6Oe5P.js";import{p as ie,o as ce,l as d,a as de,f as pe,h as ue,b as me,c as fe,d as ge,g as t,m as l,$ as ye,j as he,q as M,u as N,k as v,s as o,r as Pe,n as Ce}from"../chunks/sWNKMed7.js";import{i as T,s as ve,a as _e}from"../chunks/Ccl3fNd2.js";import{g as R}from"../chunks/DVl4ZBgx.js";import{P as $e}from"../chunks/qfys27k5.js";import{C as be}from"../chunks/BRFhz4VJ.js";import{U as Me}from"../chunks/BOoHOAHB.js";import{D as Te}from"../chunks/kJjQhR0J.js";import{e as Ee,a as G}from"../chunks/DA-798Ko.js";import{t as m}from"../chunks/BZUCTtPY.js";import{e as _,h as De}from"../chunks/cjRLNre3.js";import{e as E}from"../chunks/BZiHL9L3.js";import{D as we,G as D,a as ke}from"../chunks/DCYYzf48.js";import{E as Ae}from"../chunks/DAWfW-VQ.js";import{E as Ie}from"../chunks/CARsAFuo.js";import{S as Se}from"../chunks/BJXodF8n.js";import{P as xe}from"../chunks/BaVaT5nU.js";import"../chunks/C6O4o7G1.js";const V={};var Ue=pe('
                ',1);function et(q,L){ie(L,!1);const[j,H]=ve(),r=()=>_e(Ee,"$eagerCache",j),$=l(),i=l(),p=l(),w=l();let u=l([]),y=l(!0),k=l(""),h=l(""),n=l(1),c=l(25),P=l(!1),C=l(!1),f=l(!1),a=l(null);async function z(e){try{m.success("Pool Created","Pool has been created successfully."),o(P,!1)}catch(s){const g=E(s);m.error("Pool Creation Failed",g)}}async function B(e){if(t(a))try{await R.updatePool(t(a).id,e),o(C,!1),m.add({type:"success",title:"Pool Updated",message:`Pool ${t(a).id.slice(0,8)}... has been updated successfully.`}),o(a,null)}catch(s){const g=E(s);throw m.add({type:"error",title:"Update Failed",message:g}),s}}async function J(){if(!t(a))return;const e=`Pool ${t(a).id.slice(0,8)}...`;try{await R.deletePool(t(a).id),o(f,!1),m.add({type:"success",title:"Pool Deleted",message:`${e} has been deleted successfully.`}),o(a,null)}catch(s){const g=E(s);m.add({type:"error",title:"Delete Failed",message:g})}o(f,!1),o(a,null)}function K(){o(P,!0)}function A(e){o(a,e),o(C,!0)}function I(e){o(a,e),o(f,!0)}ce(async()=>{try{o(y,!0);const e=await G.getPools();e&&Array.isArray(e)&&o(u,e)}catch(e){V?.VITEST||console.error("Failed to load pools:",e),o(k,e instanceof Error?e.message:"Failed to load pools")}finally{o(y,!1)}});async function O(){try{await G.retryResource("pools")}catch(e){V?.VITEST||console.error("Retry failed:",e)}}const Q=[{key:"id",title:"ID",flexible:!0,cellComponent:Ae,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:D,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:D,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:D,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:xe},{key:"endpoint",title:"Endpoint",cellComponent:Ie},{key:"status",title:"Status",cellComponent:Se,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:ke}],W={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:e=>_(e,r())},badges:[{type:"custom",value:e=>({variant:e.enabled?"success":"error",text:e.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:e=>A(e)},{type:"delete",handler:e=>I(e)}]};function X(e){o(h,e.detail.term),o(n,1)}function Y(e){o(n,e.detail.page)}function Z(e){o(c,e.detail.perPage),o(n,1)}function ee(e){A(e.detail.item)}function te(e){I(e.detail.item)}d(()=>(t(u),r()),()=>{(!t(u).length||r().loaded.pools)&&o(u,r().pools)}),d(()=>r(),()=>{o(y,r().loading.pools)}),d(()=>r(),()=>{o($,r().errorMessages.pools)}),d(()=>(t(u),t(h),r()),()=>{o(i,De(t(u),t(h),e=>_(e,r())))}),d(()=>(t(i),t(c)),()=>{o(p,Math.ceil(t(i).length/t(c)))}),d(()=>(t(n),t(p)),()=>{t(n)>t(p)&&t(p)>0&&o(n,t(p))}),d(()=>(t(i),t(n),t(c)),()=>{o(w,t(i).slice((t(n)-1)*t(c),t(n)*t(c)))}),de(),ne();var S=Ue();ue(e=>{ye.title="Pools - GARM"});var b=me(S),x=he(b);$e(x,{title:"Pools",description:"Manage runner pools across all entities",actionLabel:"Add Pool",$$events:{action:K}});var oe=v(x,2);{let e=M(()=>t($)||t(k)),s=M(()=>!!t($));we(oe,{get columns(){return Q},get data(){return t(w)},get loading(){return t(y)},get error(){return t(e)},get searchTerm(){return t(h)},searchPlaceholder:"Search by entity name...",get currentPage(){return t(n)},get perPage(){return t(c)},get totalPages(){return t(p)},get totalItems(){return t(i),N(()=>t(i).length)},itemName:"pools",emptyIconType:"cog",get showRetry(){return t(s)},get 
mobileCardConfig(){return W},$$events:{search:X,pageChange:Y,perPageChange:Z,retry:O,edit:ee,delete:te}})}Pe(b);var U=v(b,2);{var ae=e=>{be(e,{$$events:{close:()=>o(P,!1),submit:z}})};T(U,e=>{t(P)&&e(ae)})}var F=v(U,2);{var le=e=>{Me(e,{get pool(){return t(a)},$$events:{close:()=>{o(C,!1),o(a,null)},submit:s=>B(s.detail)}})};T(F,e=>{t(C)&&t(a)&&e(le)})}var re=v(F,2);{var se=e=>{{let s=M(()=>(t(a),Ce(_),r(),N(()=>`Pool ${t(a).id.slice(0,8)}... (${_(t(a),r())})`)));Te(e,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(s)},$$events:{close:()=>{o(f,!1),o(a,null)},confirm:J}})}};T(re,e=>{t(f)&&t(a)&&e(se)})}fe(q,S),ge(),H()}export{et as component}; diff --git a/webapp/assets/_app/immutable/nodes/14.BAdXPegi.js b/webapp/assets/_app/immutable/nodes/14.BAdXPegi.js deleted file mode 100644 index b7811087..00000000 --- a/webapp/assets/_app/immutable/nodes/14.BAdXPegi.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/zNh6Oe5P.js";import{p as Ve,o as We,A as Ke,l as Qe,a as Xe,f as h,h as Ye,b as B,t as b,c as x,d as Ze,u as i,n as u,v as l,g as t,m as y,j as r,k as s,s as n,$ as ta,r as a,y as ut,B as Kt,q as N}from"../chunks/sWNKMed7.js";import{i as g,s as ea,a as aa}from"../chunks/Ccl3fNd2.js";import{w as Qt,e as ra,i as da}from"../chunks/BuuPrWMc.js";import{d as F,c as Xt,g as R,s as sa}from"../chunks/DVl4ZBgx.js";import{p as ia}from"../chunks/CJwphPxi.js";import{g as Yt}from"../chunks/CPCsbdkz.js";import{U as oa}from"../chunks/BOoHOAHB.js";import{D as Zt}from"../chunks/kJjQhR0J.js";import{D as na,I as la}from"../chunks/0ZGtv6cq.js";import{t as D}from"../chunks/BZUCTtPY.js";import{e as P,i as C,j as te,b as L,g as ee}from"../chunks/cjRLNre3.js";import{e as O}from"../chunks/BZiHL9L3.js";var va=h('

                Loading pool...

                '),ca=h('

                '),ma=h('
                GitHub Runner Group
                '),xa=h(' '),ua=h('
                Tags
                '),ga=h('

                Extra Specifications

                 
                '),pa=h('

                Basic Information

                Pool ID
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Priority
                Runner Prefix
                OS Type / Architecture
                ',1),fa=h(' ',1);function Ua(ae,re){Ve(re,!1);const[de,se]=ea(),gt=()=>aa(ia,"$page",de),G=y();let e=y(null),J=y(!0),M=y(""),E=y(!1),A=y(!1),T=y(!1),p=y(null),U=null;async function ie(){if(t(G))try{n(J,!0),n(M,""),n(e,await R.getPool(t(G)))}catch(d){n(M,O(d))}finally{n(J,!1)}}async function oe(d){if(t(e))try{const o=await R.updatePool(t(e).id,d);n(e,o),n(E,!1),D.success("Pool Updated",`Pool ${t(e).id} has been updated successfully.`)}catch(o){const _=O(o);D.error("Update Failed",_)}}async function ne(){if(t(e)){try{await R.deletePool(t(e).id),Yt(F("/pools"))}catch(d){const o=O(d);D.error("Delete Failed",o)}n(A,!1)}}async function le(){if(t(p)){try{await R.deleteInstance(t(p).name),D.success("Instance Deleted",`Instance ${t(p).name} has been deleted successfully.`)}catch(d){const o=O(d);D.error("Delete Failed",o)}n(T,!1),n(p,null)}}function ve(d){n(p,d),n(T,!0)}function ce(d){if(!d)return"{}";try{if(typeof d=="string"){const o=JSON.parse(d);return JSON.stringify(o,null,2)}return JSON.stringify(d,null,2)}catch{return d.toString()}}function me(d){if(d.operation==="update"){const o=d.payload;t(e)&&o.id===t(e).id&&n(e,o)}else if(d.operation==="delete"){const o=d.payload.id||d.payload;t(e)&&t(e).id===o&&Yt(F("/pools"))}}function xe(d){if(!t(e)||!t(e).instances)return;const o=d.payload;if(o.pool_id===t(e).id){if(d.operation==="create")ut(e,t(e).instances=[...t(e).instances,o]);else if(d.operation==="update")ut(e,t(e).instances=t(e).instances.map(_=>_.id===o.id?o:_));else if(d.operation==="delete"){const _=o.id||o;ut(e,t(e).instances=t(e).instances.filter(H=>H.id!==_))}n(e,t(e))}}We(()=>{ie();const d=Qt.subscribeToEntity("pool",["update","delete"],me),o=Qt.subscribeToEntity("instance",["create","update","delete"],xe);U=()=>{d(),o()}}),Ke(()=>{U&&(U(),U=null)}),Qe(()=>gt(),()=>{n(G,gt().params.id)}),Xe(),He();var pt=fa();Ye(d=>{b(()=>ta.title=`${t(e),i(()=>t(e)?`Pool ${t(e).id} - Pool Details`:"Pool Details")??""} - GARM`)});var j=B(pt),q=r(j),ft=r(q),z=r(ft),ue=r(z);a(z);var _t=s(z,2),yt=r(_t),ht=s(r(yt),2),ge=r(ht,!0);a(ht),a(yt),a(_t),a(ft),a(q);var pe=s(q,2);{var fe=d=>{var o=va();x(d,o)},_e=d=>{var o=Kt(),_=B(o);{var H=k=>{var w=ca(),S=r(w),V=r(S,!0);a(S),a(w),b(()=>l(V,t(M))),x(k,w)},we=k=>{var w=Kt(),S=B(w);{var V=W=>{var wt=pa(),$t=B(wt);{let v=N(()=>(u(P),t(e),i(()=>P(t(e))))),c=N(()=>(u(C),t(e),i(()=>C(t(e))))),m=N(()=>(u(ee),t(e),i(()=>ee(t(e).endpoint?.endpoint_type||"unknown"))));na($t,{get title(){return t(e),i(()=>t(e).id)},get subtitle(){return`Pool for ${t(v)??""} (${t(c)??""})`},get forgeIcon(){return t(m)},onEdit:()=>n(E,!0),onDelete:()=>n(A,!0)})}var K=s($t,2),Q=r(K),Pt=r(Q),It=s(r(Pt),2),X=r(It),Dt=s(r(X),2),$e=r(Dt,!0);a(Dt),a(X);var Y=s(X,2),Mt=s(r(Y),2),Pe=r(Mt,!0);a(Mt),a(Y);var Z=s(Y,2),Et=s(r(Z),2),At=r(Et),Ie=r(At,!0);a(At),a(Et),a(Z);var tt=s(Z,2),Tt=s(r(tt),2),De=r(Tt,!0);a(Tt),a(tt);var et=s(tt,2),Ut=s(r(et),2),at=r(Ut),Me=r(at,!0);a(at),a(Ut),a(et);var rt=s(et,2),St=s(r(rt),2),Bt=r(St),dt=r(Bt),Ee=r(dt,!0);a(dt);var st=s(dt,2),Ae=r(st,!0);a(st),a(Bt),a(St),a(rt);var it=s(rt,2),Nt=s(r(it),2),Te=r(Nt,!0);a(Nt),a(it);var Ft=s(it,2),Rt=s(r(Ft),2),Ue=r(Rt,!0);a(Rt),a(Ft),a(It),a(Pt),a(Q);var Ct=s(Q,2),Lt=r(Ct),Ot=s(r(Lt),2),ot=r(Ot),Gt=s(r(ot),2),Se=r(Gt,!0);a(Gt),a(ot);var nt=s(ot,2),Jt=s(r(nt),2),Be=r(Jt,!0);a(Jt),a(nt);var lt=s(nt,2),jt=s(r(lt),2),Ne=r(jt);a(jt),a(lt);var vt=s(lt,2),qt=s(r(vt),2),Fe=r(qt,!0);a(qt),a(vt);var ct=s(vt,2),zt=s(r(ct),2),Re=r(zt,!0);a(zt),a(ct);var mt=s(ct,2),Ht=s(r(mt),2),Ce=r(Ht);a(Ht),a(mt);var Vt=s(mt,2);{var Le=v=>{var 
c=ma(),m=s(r(c),2),f=r(m,!0);a(m),a(c),b(()=>l(f,(t(e),i(()=>t(e)["github-runner-group"])))),x(v,c)};g(Vt,v=>{t(e),i(()=>t(e)["github-runner-group"])&&v(Le)})}var Oe=s(Vt,2);{var Ge=v=>{var c=ua(),m=s(r(c),2),f=r(m);ra(f,5,()=>(t(e),i(()=>t(e).tags)),da,(I,$)=>{var xt=xa(),ze=r(xt,!0);a(xt),b(()=>l(ze,(t($),i(()=>typeof t($)=="string"?t($):t($).name)))),x(I,xt)}),a(f),a(m),a(c),x(v,c)};g(Oe,v=>{t(e),i(()=>t(e).tags&&t(e).tags.length>0)&&v(Ge)})}a(Ot),a(Lt),a(Ct),a(K);var Wt=s(K,2);{var Je=v=>{var c=ga(),m=r(c),f=s(r(m),2),I=r(f,!0);a(f),a(m),a(c),b($=>l(I,$),[()=>(t(e),i(()=>ce(t(e).extra_specs)))]),x(v,c)};g(Wt,v=>{t(e),i(()=>t(e).extra_specs)&&v(Je)})}var je=s(Wt,2);{var qe=v=>{la(v,{get instances(){return t(e),i(()=>t(e).instances)},entityType:"repository",onDeleteInstance:ve})};g(je,v=>{t(e),i(()=>t(e).instances)&&v(qe)})}b((v,c,m,f,I)=>{l($e,(t(e),i(()=>t(e).id))),l(Pe,(t(e),i(()=>t(e).provider_name))),l(Ie,(t(e),i(()=>t(e).image))),l(De,(t(e),i(()=>t(e).flavor))),sa(at,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${t(e),i(()=>t(e).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),l(Me,(t(e),i(()=>t(e).enabled?"Enabled":"Disabled"))),l(Ee,v),Xt(st,"href",c),l(Ae,m),l(Te,f),l(Ue,I),l(Se,(t(e),i(()=>t(e).max_runners))),l(Be,(t(e),i(()=>t(e).min_idle_runners))),l(Ne,`${t(e),i(()=>t(e).runner_bootstrap_timeout)??""} minutes`),l(Fe,(t(e),i(()=>t(e).priority))),l(Re,(t(e),i(()=>t(e).runner_prefix||"garm"))),l(Ce,`${t(e),i(()=>t(e).os_type)??""} / ${t(e),i(()=>t(e).os_arch)??""}`)},[()=>(u(C),t(e),i(()=>C(t(e)))),()=>(u(te),t(e),i(()=>te(t(e)))),()=>(u(P),t(e),i(()=>P(t(e)))),()=>(u(L),t(e),i(()=>L(t(e).created_at||""))),()=>(u(L),t(e),i(()=>L(t(e).updated_at||"")))]),x(W,wt)};g(S,W=>{t(e)&&W(V)},!0)}x(k,w)};g(_,k=>{t(M)?k(H):k(we,!1)},!0)}x(d,o)};g(pe,d=>{t(J)?d(fe):d(_e,!1)})}a(j);var bt=s(j,2);{var ye=d=>{oa(d,{get pool(){return t(e)},$$events:{close:()=>n(E,!1),submit:o=>oe(o.detail)}})};g(bt,d=>{t(E)&&t(e)&&d(ye)})}var kt=s(bt,2);{var he=d=>{{let o=N(()=>(t(e),u(P),i(()=>`Pool ${t(e).id} (${P(t(e))})`)));Zt(d,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(o)},$$events:{close:()=>n(A,!1),confirm:ne}})}};g(kt,d=>{t(A)&&t(e)&&d(he)})}var be=s(kt,2);{var ke=d=>{Zt(d,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(p),i(()=>t(p).name)},$$events:{close:()=>{n(T,!1),n(p,null)},confirm:le}})};g(be,d=>{t(T)&&t(p)&&d(ke)})}b(d=>{Xt(ue,"href",d),l(ge,(t(e),i(()=>t(e)?t(e).id:"Loading...")))},[()=>(u(F),i(()=>F("/pools")))]),x(ae,pt),Ze(),se()}export{Ua as component}; diff --git a/webapp/assets/_app/immutable/nodes/15.CAiEMQ9A.js b/webapp/assets/_app/immutable/nodes/15.CAiEMQ9A.js deleted file mode 100644 index ac8c64ef..00000000 --- a/webapp/assets/_app/immutable/nodes/15.CAiEMQ9A.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Me}from"../chunks/zNh6Oe5P.js";import{p as Te,E as Ne,o as De,l as w,s as r,m as a,g as e,y as T,a as Se,f as S,k as l,j as c,r as u,c as D,t as Q,v as fe,x as $e,u as j,z as Re,e as ye,d as Fe,h as ze,b as Be,$ as Oe,q as Ce}from"../chunks/sWNKMed7.js";import{a as Ae,i as V,s as Ie}from"../chunks/Ccl3fNd2.js";import{r as X,b as Pe,g as oe}from"../chunks/DVl4ZBgx.js";import{e as He,i as Je}from"../chunks/BuuPrWMc.js";import{b as ve,a as Ee}from"../chunks/CLagxtgo.js";import{p as Ve}from"../chunks/D4Caz1gY.js";import{M as Ye}from"../chunks/DN14Fk2Y.js";import{e as ae}from"../chunks/BZiHL9L3.js";import{F as Ke}from"../chunks/z0u7Z3zm.js";import{e as We,a as he}from"../chunks/DA-798Ko.js";import{U as Qe}from"../chunks/XzGG0o_q.js";import{D as Xe}from"../chunks/kJjQhR0J.js";import{P as Ze}from"../chunks/qfys27k5.js";import{t as J}from"../chunks/BZUCTtPY.js";import{k as et,g as tt,c as rt,m as ot,p as at}from"../chunks/cjRLNre3.js";import{D as st,G as nt,a as it}from"../chunks/DCYYzf48.js";import{E as lt}from"../chunks/DAWfW-VQ.js";import{E as dt}from"../chunks/CARsAFuo.js";import{S as ct}from"../chunks/BJXodF8n.js";import"../chunks/C6O4o7G1.js";var ut=S('

                '),pt=S('

                Loading...

                '),mt=S(""),gt=S(''),bt=S('

                Webhook secret will be automatically generated

                '),ft=S('
                '),yt=S('

                Create Repository

                ');function vt(se,ne){Te(ne,!1);const[ie,le]=Ie(),p=()=>Ae(We,"$eagerCache",ie),R=a(),k=a(),G=a(),x=a(),C=Ne();let y=a(!1),g=a(""),b=a("github"),o=a({name:"",owner:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),F=a(!0),d=a(!0);async function f(){if(!p().loaded.credentials&&!p().loading.credentials)try{await he.getCredentials()}catch(i){r(g,ae(i))}}function _(i){r(b,i.detail),T(o,e(o).credentials_name="")}function P(){if(e(o).credentials_name){const i=e(R).find(A=>A.name===e(o).credentials_name);i&&i.forge_type&&r(b,i.forge_type)}}function E(){const i=new Uint8Array(32);return crypto.getRandomValues(i),Array.from(i,A=>A.toString(16).padStart(2,"0")).join("")}De(()=>{f()});async function de(){if(!e(o).name?.trim()){r(g,"Repository name is required");return}if(!e(o).owner?.trim()){r(g,"Repository owner is required");return}if(!e(o).credentials_name){r(g,"Please select credentials");return}try{r(y,!0),r(g,"");const i={...e(o),install_webhook:e(F),auto_generate_secret:e(d)};C("submit",i)}catch(i){r(g,ae(i)),r(y,!1)}}w(()=>p(),()=>{r(R,p().credentials)}),w(()=>p(),()=>{r(k,p().loading.credentials)}),w(()=>(e(R),e(b)),()=>{r(G,e(R).filter(i=>e(b)?i.forge_type===e(b):!0))}),w(()=>e(d),()=>{e(d)?T(o,e(o).webhook_secret=E()):e(d)||T(o,e(o).webhook_secret="")}),w(()=>(e(o),e(d)),()=>{r(x,e(o).name?.trim()!==""&&e(o).owner?.trim()!==""&&e(o).credentials_name!==""&&(e(d)||e(o).webhook_secret?.trim()!==""))}),Se(),Me(),Ye(se,{$$events:{close:()=>C("close")},children:(i,A)=>{var M=yt(),Z=l(c(M),2);{var ce=v=>{var h=ut(),I=c(h),W=c(I,!0);u(I),u(h),Q(()=>fe(W,e(g))),D(v,h)};V(Z,v=>{e(g)&&v(ce)})}var ue=l(Z,2);{var pe=v=>{var h=pt();D(v,h)},me=v=>{var h=ft(),I=c(h);Ke(I,{get selectedForgeType(){return e(b)},set selectedForgeType(n){r(b,n)},$$events:{select:_},$$legacy:!0});var W=l(I,2),ee=l(c(W),2);X(ee),u(W);var L=l(W,2),N=l(c(L),2);X(N),u(L);var z=l(L,2),B=l(c(z),2);Q(()=>{e(o),$e(()=>{e(G)})});var O=c(B);O.value=O.__value="";var ge=l(O);He(ge,1,()=>e(G),Je,(n,m)=>{var U=mt(),Le=c(U);u(U);var xe={};Q(()=>{fe(Le,`${e(m),j(()=>e(m).name)??""} (${e(m),j(()=>e(m).endpoint?.name)??""})`),xe!==(xe=(e(m),j(()=>e(m).name)))&&(U.value=(U.__value=(e(m),j(()=>e(m).name)))??"")}),D(n,U)}),u(B),u(z);var H=l(z,2),Y=l(c(H),2);Q(()=>{e(o),$e(()=>{})});var K=c(Y);K.value=K.__value="roundrobin";var te=l(K);te.value=te.__value="pack",u(Y),u(H);var t=l(H,2),s=c(t),re=c(s);X(re),Re(2),u(s);var $=l(s,2),q=c($),_e=c(q);X(_e),Re(2),u(q);var qe=l(q,2);{var Ue=n=>{var m=gt();X(m),ve(m,()=>e(o).webhook_secret,U=>T(o,e(o).webhook_secret=U)),D(n,m)},je=n=>{var m=bt();D(n,m)};V(qe,n=>{e(d)?n(je,!1):n(Ue)})}u($),u(t);var we=l(t,2),ke=c(we),be=l(ke,2),Ge=c(be,!0);u(be),u(we),u(h),Q(()=>{be.disabled=e(y)||e(k)||!e(x),fe(Ge,e(y)?"Creating...":"Create Repository")}),ve(ee,()=>e(o).name,n=>T(o,e(o).name=n)),ve(N,()=>e(o).owner,n=>T(o,e(o).owner=n)),Pe(B,()=>e(o).credentials_name,n=>T(o,e(o).credentials_name=n)),ye("change",B,P),Pe(Y,()=>e(o).pool_balancer_type,n=>T(o,e(o).pool_balancer_type=n)),Ee(re,()=>e(F),n=>r(F,n)),Ee(_e,()=>e(d),n=>r(d,n)),ye("click",ke,()=>C("close")),ye("submit",h,Ve(de)),D(v,h)};V(ue,v=>{e(y)?v(pe):v(me,!1)})}u(M),D(i,M)},$$slots:{default:!0}}),Fe(),le()}var ht=S('
                ',1);function Nt(se,ne){Te(ne,!1);const[ie,le]=Ie(),p=()=>Ae(We,"$eagerCache",ie),R=a(),k=a(),G=a();let x=a([]),C=a(!0),y=a(""),g=a(""),b=a(!1),o=a(!1),F=a(!1),d=a(null),f=a(null),_=a(1),P=a(25),E=a(1);De(async()=>{try{r(C,!0);const t=await he.getRepositories();t&&Array.isArray(t)&&r(x,t)}catch(t){console.error("Failed to load repositories:",t),r(y,t instanceof Error?t.message:"Failed to load repositories")}finally{r(C,!1)}});async function de(){try{await he.retryResource("repositories")}catch(t){console.error("Retry failed:",t)}}function i(t){r(d,t),r(o,!0)}function A(t){r(f,t),r(F,!0)}function M(){r(b,!1),r(o,!1),r(F,!1),r(d,null),r(f,null),r(y,"")}async function Z(t){try{r(y,"");const s=t.detail,re={name:s.name,owner:s.owner,credentials_name:s.credentials_name,webhook_secret:s.webhook_secret},$=await oe.createRepository(re);if(s.install_webhook&&$.id)try{await oe.installRepoWebhook($.id),J.success("Webhook Installed",`Webhook for repository ${$.owner}/${$.name} has been installed successfully.`)}catch(q){console.warn("Repository created but webhook installation failed:",q),J.error("Webhook Installation Failed",q instanceof Error?q.message:"Failed to install webhook. You can try installing it manually from the repository details page.")}r(b,!1),J.success("Repository Created",`Repository ${$.owner}/${$.name} has been created successfully.`)}catch(s){throw r(y,ae(s)),s}}async function ce(t){if(e(d))try{await oe.updateRepository(e(d).id,t),J.success("Repository Updated",`Repository ${e(d).owner}/${e(d).name} has been updated successfully.`),M()}catch(s){throw s}}async function ue(){if(e(f))try{r(y,""),await oe.deleteRepository(e(f).id),J.success("Repository Deleted",`Repository ${e(f).owner}/${e(f).name} has been deleted successfully.`)}catch(t){const s=ae(t);J.error("Delete Failed",s)}finally{M()}}const pe=[{key:"repository",title:"Repository",cellComponent:lt,cellProps:{entityType:"repository",showOwner:!0}},{key:"endpoint",title:"Endpoint",cellComponent:dt},{key:"credentials",title:"Credentials",cellComponent:nt,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:ct,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:it}],me={entityType:"repository",primaryText:{field:"name",isClickable:!0,href:"/repositories/{id}",showOwner:!0},customInfo:[{icon:t=>tt(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>et(t)}],actions:[{type:"edit",handler:t=>i(t)},{type:"delete",handler:t=>A(t)}]};function v(t){r(g,t.detail.term),r(_,1)}function h(t){r(_,t.detail.page)}function I(t){const s=rt(t.detail.perPage);r(P,s.newPerPage),r(_,s.newCurrentPage)}function W(t){i(t.detail.item)}function ee(t){A(t.detail.item)}w(()=>(e(x),p()),()=>{(!e(x).length||p().loaded.repositories)&&r(x,p().repositories)}),w(()=>p(),()=>{r(C,p().loading.repositories)}),w(()=>p(),()=>{r(R,p().errorMessages.repositories)}),w(()=>(e(x),e(g)),()=>{r(k,ot(e(x),e(g)))}),w(()=>(e(E),e(k),e(P),e(_)),()=>{r(E,Math.ceil(e(k).length/e(P))),e(_)>e(E)&&e(E)>0&&r(_,e(E))}),w(()=>(e(k),e(_),e(P)),()=>{r(G,at(e(k),e(_),e(P)))}),Se(),Me();var L=ht();ze(t=>{Oe.title="Repositories - GARM"});var N=Be(L),z=c(N);Ze(z,{title:"Repositories",description:"Manage your GitHub repositories and their runners",actionLabel:"Add Repository",$$events:{action:()=>{r(b,!0)}}});var B=l(z,2);{let t=Ce(()=>e(R)||e(y)),s=Ce(()=>!!e(R));st(B,{get columns(){return pe},get data(){return e(G)},get loading(){return e(C)},get 
error(){return e(t)},get searchTerm(){return e(g)},searchPlaceholder:"Search repositories by name or owner...",get currentPage(){return e(_)},get perPage(){return e(P)},get totalPages(){return e(E)},get totalItems(){return e(k),j(()=>e(k).length)},itemName:"repositories",emptyIconType:"building",get showRetry(){return e(s)},get mobileCardConfig(){return me},$$events:{search:v,pageChange:h,perPageChange:I,retry:de,edit:W,delete:ee}})}u(N);var O=l(N,2);{var ge=t=>{vt(t,{$$events:{close:()=>r(b,!1),submit:Z}})};V(O,t=>{e(b)&&t(ge)})}var H=l(O,2);{var Y=t=>{Qe(t,{get entity(){return e(d)},entityType:"repository",$$events:{close:M,submit:s=>ce(s.detail)}})};V(H,t=>{e(o)&&e(d)&&t(Y)})}var K=l(H,2);{var te=t=>{Xe(t,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and runners.",get itemName(){return`${e(f),j(()=>e(f).owner)??""}/${e(f),j(()=>e(f).name)??""}`},$$events:{close:M,confirm:ue}})};V(K,t=>{e(F)&&e(f)&&t(te)})}D(se,L),Fe(),le()}export{Nt as component}; diff --git a/webapp/assets/_app/immutable/nodes/16.DBIP7RdC.js b/webapp/assets/_app/immutable/nodes/16.DBIP7RdC.js deleted file mode 100644 index 031c3008..00000000 --- a/webapp/assets/_app/immutable/nodes/16.DBIP7RdC.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as ze}from"../chunks/zNh6Oe5P.js";import{p as He,o as je,A as Ge,l as Oe,a as Ve,f as B,h as Je,b as C,t as j,c as x,d as Ke,u as n,n as de,v as ce,g as e,m as l,j as u,k as d,s as r,$ as Qe,r as f,y as Xe,B as pe,q as m}from"../chunks/sWNKMed7.js";import{i as g,s as Ye,a as Ze}from"../chunks/Ccl3fNd2.js";import{d as A,c as et,g as h}from"../chunks/DVl4ZBgx.js";import{p as tt}from"../chunks/CJwphPxi.js";import{g as ue}from"../chunks/CPCsbdkz.js";import{U as ot}from"../chunks/XzGG0o_q.js";import{D as fe}from"../chunks/kJjQhR0J.js";import{E as at,P as rt,a as st}from"../chunks/CaVdfWt-.js";import{D as nt,I as it}from"../chunks/0ZGtv6cq.js";import{g as me}from"../chunks/cjRLNre3.js";import{e as S}from"../chunks/BZiHL9L3.js";import{W as lt}from"../chunks/Clig3Vwb.js";import{C as dt}from"../chunks/BRFhz4VJ.js";import{w as G}from"../chunks/BuuPrWMc.js";import{t as w}from"../chunks/BZUCTtPY.js";var ct=B('

                Loading repository...

                '),pt=B('

                '),ut=B(" ",1),ft=B(' ',1);function kt(ye,ve){He(ve,!1);const[ge,he]=Ye(),O=()=>Ze(tt,"$page",ge),I=l();let t=l(null),c=l([]),y=l([]),F=l(!0),E=l(""),D=l(!1),P=l(!1),R=l(!1),T=l(!1),p=l(null),M=null,_=l();async function V(){if(e(I))try{r(F,!0),r(E,"");const[o,a,s]=await Promise.all([h.getRepository(e(I)),h.listRepositoryPools(e(I)).catch(()=>[]),h.listRepositoryInstances(e(I)).catch(()=>[])]);r(t,o),r(c,a),r(y,s)}catch(o){r(E,S(o))}finally{r(F,!1)}}function _e(o,a){const{events:s}=o;return{...a,events:s}}async function $e(o){if(e(t))try{await h.updateRepository(e(t).id,o),await V(),w.success("Repository Updated",`Repository ${e(t).owner}/${e(t).name} has been updated successfully.`),r(D,!1)}catch(a){throw a}}async function be(){if(e(t)){try{await h.deleteRepository(e(t).id),ue(A("/repositories"))}catch(o){const a=S(o);w.error("Delete Failed",a)}r(P,!1)}}async function xe(){if(e(p))try{await h.deleteInstance(e(p).name),w.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),r(R,!1),r(p,null)}catch(o){const a=S(o);w.error("Delete Failed",a),r(R,!1),r(p,null)}}function we(o){r(p,o),r(R,!0)}function Ie(){r(T,!0)}async function Re(o){try{if(!e(t))return;await h.createRepositoryPool(e(t).id,o.detail),w.success("Pool Created",`Pool has been created successfully for repository ${e(t).owner}/${e(t).name}.`),r(T,!1)}catch(a){const s=S(a);w.error("Pool Creation Failed",s)}}function J(){e(_)&&Xe(_,e(_).scrollTop=e(_).scrollHeight)}function Ee(o){if(o.operation==="update"){const a=o.payload;if(e(t)&&a.id===e(t).id){const s=e(t).events?.length||0,i=a.events?.length||0;r(t,_e(e(t),a)),i>s&&setTimeout(()=>{J()},100)}}else if(o.operation==="delete"){const a=o.payload.id||o.payload;e(t)&&e(t).id===a&&ue(A("/repositories"))}}function De(o){if(!e(t))return;const a=o.payload;if(a.repo_id===e(t).id){if(o.operation==="create")r(c,[...e(c),a]);else if(o.operation==="update")r(c,e(c).map(s=>s.id===a.id?a:s));else if(o.operation==="delete"){const s=a.id||a;r(c,e(c).filter(i=>i.id!==s))}}}function Pe(o){if(!e(t)||!e(c))return;const a=o.payload;if(e(c).some(i=>i.id===a.pool_id)){if(o.operation==="create")r(y,[...e(y),a]);else if(o.operation==="update")r(y,e(y).map(i=>i.id===a.id?a:i));else if(o.operation==="delete"){const i=a.id||a;r(y,e(y).filter(W=>W.id!==i))}}}je(()=>{V().then(()=>{e(t)?.events?.length&&setTimeout(()=>{J()},100)});const o=G.subscribeToEntity("repository",["update","delete"],Ee),a=G.subscribeToEntity("pool",["create","update","delete"],De),s=G.subscribeToEntity("instance",["create","update","delete"],Pe);M=()=>{o(),a(),s()}}),Ge(()=>{M&&(M(),M=null)}),Oe(()=>O(),()=>{r(I,O().params.id)}),Ve(),ze();var K=ft();Je(o=>{j(()=>Qe.title=`${e(t),n(()=>e(t)?`${e(t).name} - Repository Details`:"Repository Details")??""} - GARM`)});var U=C(K),L=u(U),Q=u(L),N=u(Q),Te=u(N);f(N);var X=d(N,2),Y=u(X),Z=d(u(Y),2),Me=u(Z,!0);f(Z),f(Y),f(X),f(Q),f(L);var ke=d(L,2);{var Ce=o=>{var a=ct();x(o,a)},Ae=o=>{var a=pe(),s=C(a);{var i=$=>{var b=pt(),k=u(b),q=u(k,!0);f(k),f(b),j(()=>ce(q,e(E))),x($,b)},W=$=>{var b=pe(),k=C(b);{var q=z=>{var ae=ut(),re=C(ae);{let v=m(()=>(e(t),n(()=>e(t).name||"Repository"))),H=m(()=>(e(t),n(()=>e(t).owner))),We=m(()=>(e(t),n(()=>e(t).endpoint?.name))),qe=m(()=>(de(me),e(t),n(()=>me(e(t).endpoint?.endpoint_type||"unknown"))));nt(re,{get title(){return e(v)},get subtitle(){return`Owner: ${e(H)??""} • Endpoint: ${e(We)??""}`},get forgeIcon(){return e(qe)},onEdit:()=>r(D,!0),onDelete:()=>r(P,!0)})}var se=d(re,2);at(se,{get entity(){return 
e(t)},entityType:"repository"});var ne=d(se,2);{let v=m(()=>(e(t),n(()=>e(t).id||"")));lt(ne,{entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),n(()=>e(t).owner)??""}/${e(t),n(()=>e(t).name)??""}`}})}var ie=d(ne,2);{let v=m(()=>(e(t),n(()=>e(t).id||"")));rt(ie,{get pools(){return e(c)},entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),n(()=>e(t).owner)??""}/${e(t),n(()=>e(t).name)??""}`},$$events:{addPool:Ie}})}var le=d(ie,2);it(le,{get instances(){return e(y)},entityType:"repository",onDeleteInstance:we});var Ne=d(le,2);{let v=m(()=>(e(t),n(()=>e(t)?.events)));st(Ne,{get events(){return e(v)},get eventsContainer(){return e(_)},set eventsContainer(H){r(_,H)},$$legacy:!0})}x(z,ae)};g(k,z=>{e(t)&&z(q)},!0)}x($,b)};g(s,$=>{e(E)?$(i):$(W,!1)},!0)}x(o,a)};g(ke,o=>{e(F)?o(Ce):o(Ae,!1)})}f(U);var ee=d(U,2);{var Se=o=>{ot(o,{get entity(){return e(t)},entityType:"repository",$$events:{close:()=>r(D,!1),submit:a=>$e(a.detail)}})};g(ee,o=>{e(D)&&e(t)&&o(Se)})}var te=d(ee,2);{var Be=o=>{{let a=m(()=>(e(t),n(()=>`${e(t).owner}/${e(t).name}`)));fe(o,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a)},$$events:{close:()=>r(P,!1),confirm:be}})}};g(te,o=>{e(P)&&e(t)&&o(Be)})}var oe=d(te,2);{var Fe=o=>{fe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),n(()=>e(p).name)},$$events:{close:()=>{r(R,!1),r(p,null)},confirm:xe}})};g(oe,o=>{e(R)&&e(p)&&o(Fe)})}var Ue=d(oe,2);{var Le=o=>{{let a=m(()=>(e(t),n(()=>e(t).id||"")));dt(o,{initialEntityType:"repository",get initialEntityId(){return e(a)},$$events:{close:()=>r(T,!1),submit:Re}})}};g(Ue,o=>{e(T)&&e(t)&&o(Le)})}j(o=>{et(Te,"href",o),ce(Me,(e(t),n(()=>e(t)?e(t).name:"Loading...")))},[()=>(de(A),n(()=>A("/repositories")))]),x(ye,K),Ke(),he()}export{kt as component}; diff --git a/webapp/assets/_app/immutable/nodes/17.BttmUS7o.js b/webapp/assets/_app/immutable/nodes/17.BttmUS7o.js deleted file mode 100644 index b10f85be..00000000 --- a/webapp/assets/_app/immutable/nodes/17.BttmUS7o.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as hr}from"../chunks/zNh6Oe5P.js";import{p as xr,E as Hr,o as _r,f as w,k as a,j as o,g as e,m as i,r as s,t as _,s as r,e as ne,c as f,v as X,b as kr,z as pr,x as he,D as Be,d as wr,l as q,a as jr,h as Nr,$ as Ur,q as Je,u as mr,n as qr}from"../chunks/sWNKMed7.js";import{i as D,s as Or,a as Lr}from"../chunks/Ccl3fNd2.js";import{r as T,s as Ve,b as xe,g as R}from"../chunks/DVl4ZBgx.js";import{P as Br}from"../chunks/qfys27k5.js";import{e as vr,i as fr}from"../chunks/BuuPrWMc.js";import{b as H,a as Jr}from"../chunks/CLagxtgo.js";import{p as Vr}from"../chunks/D4Caz1gY.js";import{M as Fr}from"../chunks/DN14Fk2Y.js";import{J as Wr}from"../chunks/CkYhV7Br.js";import{e as Y}from"../chunks/BZiHL9L3.js";import{U as Kr}from"../chunks/vrFkfzoI.js";import{D as Qr}from"../chunks/kJjQhR0J.js";import{e as Xr,a as yr}from"../chunks/DA-798Ko.js";import{t as _e}from"../chunks/BZUCTtPY.js";import{e as ce,h as Yr}from"../chunks/cjRLNre3.js";import{D as Zr,G as Fe,a as et}from"../chunks/DCYYzf48.js";import{E as rt}from"../chunks/DAWfW-VQ.js";import{E as tt}from"../chunks/CARsAFuo.js";import{S as at}from"../chunks/BJXodF8n.js";import{P as ot}from"../chunks/BaVaT5nU.js";import"../chunks/C6O4o7G1.js";var st=w('

                '),lt=w('
                '),it=w(""),dt=w(''),nt=w('
                '),ct=w(""),ut=w(''),gt=w('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),bt=w('
                Creating...
                '),pt=w('

                Create New Scale Set

                Scale sets are only available for GitHub endpoints

-[... remainder of minified bundle omitted (compiled scale sets list page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/18.Cp7csezF.js b/webapp/assets/_app/immutable/nodes/18.Cp7csezF.js
deleted file mode 100644
index 196c10fc..00000000
--- a/webapp/assets/_app/immutable/nodes/18.Cp7csezF.js
+++ /dev/null
@@ -1 +0,0 @@
-[... minified bundle contents omitted (compiled scale set details page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/2.XEoeauIa.js b/webapp/assets/_app/immutable/nodes/2.XEoeauIa.js
deleted file mode 100644
index efdf71f2..00000000
--- a/webapp/assets/_app/immutable/nodes/2.XEoeauIa.js
+++ /dev/null
@@ -1 +0,0 @@
-[... minified bundle contents omitted (compiled dashboard page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/3.oB-RZdV5.js b/webapp/assets/_app/immutable/nodes/3.oB-RZdV5.js
deleted file mode 100644
index f9b0bb1e..00000000
--- a/webapp/assets/_app/immutable/nodes/3.oB-RZdV5.js
+++ /dev/null
@@ -1,7 +0,0 @@
-[... minified bundle contents omitted (compiled credentials page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/4.DDn9he0o.js b/webapp/assets/_app/immutable/nodes/4.DDn9he0o.js
deleted file mode 100644
index 593f69d0..00000000
--- a/webapp/assets/_app/immutable/nodes/4.DDn9he0o.js
+++ /dev/null
@@ -1,3 +0,0 @@
-[... minified bundle contents omitted (compiled endpoints page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/5.A7yiJpQp.js b/webapp/assets/_app/immutable/nodes/5.A7yiJpQp.js
deleted file mode 100644
index 8347b180..00000000
--- a/webapp/assets/_app/immutable/nodes/5.A7yiJpQp.js
+++ /dev/null
@@ -1 +0,0 @@
-[... minified bundle contents omitted (compiled enterprises page) ...]
diff --git a/webapp/assets/_app/immutable/nodes/6.ZyhHjSpx.js b/webapp/assets/_app/immutable/nodes/6.ZyhHjSpx.js
deleted file mode 100644
index f584460f..00000000
--- a/webapp/assets/_app/immutable/nodes/6.ZyhHjSpx.js
+++ /dev/null
@@ -1 +0,0 @@
-import"../chunks/DsnmJJEf.js";import{i as qe}from"../chunks/zNh6Oe5P.js";import{p as ze,o as Ge,A as je,l as Re,a as Ve,f as S,h as We,b as F,t as j,c as E,d as Je,u as i,n as le,v as de,g as e,m as l,j as u,k as d,s,$ as Ke,r as f,y as Oe,B as ce,q as v}from"../chunks/sWNKMed7.js";import{i as g,s as Qe,a as Xe}from"../chunks/Ccl3fNd2.js";import{d as B,c as Ye,g as y}from"../chunks/DVl4ZBgx.js";import{p as Ze}from"../chunks/CJwphPxi.js";import{g as pe}from"../chunks/CPCsbdkz.js";import{U as et}from"../chunks/XzGG0o_q.js";import{D as ue}from"../chunks/kJjQhR0J.js";import{E as tt,P as rt,a as at}from"../chunks/CaVdfWt-.js";import{D as st,I as nt}from"../chunks/0ZGtv6cq.js";import{g as fe}from"../chunks/cjRLNre3.js";import{w as R}from"../chunks/BuuPrWMc.js";import{t as x}from"../chunks/BZUCTtPY.js";import{C as ot}from"../chunks/BRFhz4VJ.js";import{e as V}from"../chunks/BZiHL9L3.js";var it=S('

                Loading enterprise...

                '),lt=S('

                '),dt=S(" ",1),ct=S(' ',1);function Pt(me,ve){ze(ve,!1);const[ge,ye]=Qe(),W=()=>Xe(Ze,"$page",ge),$=l();let r=l(null),c=l([]),m=l([]),U=l(!0),P=l(""),T=l(!1),M=l(!1),I=l(!1),C=l(!1),p=l(null),k=null,h=l();async function J(){if(e($))try{s(U,!0),s(P,"");const[t,a,n]=await Promise.all([y.getEnterprise(e($)),y.listEnterprisePools(e($)).catch(()=>[]),y.listEnterpriseInstances(e($)).catch(()=>[])]);s(r,t),s(c,a),s(m,n)}catch(t){s(P,t instanceof Error?t.message:"Failed to load enterprise")}finally{s(U,!1)}}function he(t,a){const{events:n}=t;return{...a,events:n}}async function _e(t){if(e(r))try{await y.updateEnterprise(e(r).id,t),await J(),x.success("Enterprise Updated",`Enterprise ${e(r).name} has been updated successfully.`),s(T,!1)}catch(a){throw a}}async function be(){if(e(r)){try{await y.deleteEnterprise(e(r).id),pe(B("/enterprises"))}catch(t){const a=V(t);x.error("Delete Failed",a)}s(M,!1)}}async function Ee(){if(e(p))try{await y.deleteInstance(e(p).name),x.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),s(I,!1),s(p,null)}catch(t){const a=V(t);x.error("Delete Failed",a),s(I,!1),s(p,null)}}function xe(t){s(p,t),s(I,!0)}function $e(){s(C,!0)}async function Ie(t){try{if(!e(r))return;await y.createEnterprisePool(e(r).id,t.detail),x.success("Pool Created",`Pool has been created successfully for enterprise ${e(r).name}.`),s(C,!1)}catch(a){const n=V(a);x.error("Pool Creation Failed",n)}}function K(){e(h)&&Oe(h,e(h).scrollTop=e(h).scrollHeight)}function we(t){if(t.operation==="update"){const a=t.payload;if(e(r)&&a.id===e(r).id){const n=e(r).events?.length||0,o=a.events?.length||0;s(r,he(e(r),a)),o>n&&setTimeout(()=>{K()},100)}}else if(t.operation==="delete"){const a=t.payload.id||t.payload;e(r)&&e(r).id===a&&pe(B("/enterprises"))}}function De(t){if(!e(r))return;const a=t.payload;if(a.enterprise_id===e(r).id){if(t.operation==="create")s(c,[...e(c),a]);else if(t.operation==="update")s(c,e(c).map(n=>n.id===a.id?a:n));else if(t.operation==="delete"){const n=a.id||a;s(c,e(c).filter(o=>o.id!==n))}}}function Pe(t){if(!e(r)||!e(c))return;const a=t.payload;if(e(c).some(o=>o.id===a.pool_id)){if(t.operation==="create")s(m,[...e(m),a]);else if(t.operation==="update")s(m,e(m).map(o=>o.id===a.id?a:o));else if(t.operation==="delete"){const o=a.id||a;s(m,e(m).filter(q=>q.id!==o))}}}Ge(()=>{J().then(()=>{e(r)?.events?.length&&setTimeout(()=>{K()},100)});const t=R.subscribeToEntity("enterprise",["update","delete"],we),a=R.subscribeToEntity("pool",["create","update","delete"],De),n=R.subscribeToEntity("instance",["create","update","delete"],Pe);k=()=>{t(),a(),n()}}),je(()=>{k&&(k(),k=null)}),Re(()=>W(),()=>{s($,W().params.id)}),Ve(),qe();var O=ct();We(t=>{j(()=>Ke.title=`${e(r),i(()=>e(r)?`${e(r).name} - Enterprise Details`:"Enterprise Details")??""} - GARM`)});var L=F(O),H=u(L),Q=u(H),N=u(Q),Te=u(N);f(N);var X=d(N,2),Y=u(X),Z=d(u(Y),2),Me=u(Z,!0);f(Z),f(Y),f(X),f(Q),f(H);var Ce=d(H,2);{var ke=t=>{var a=it();E(t,a)},Ae=t=>{var a=ce(),n=F(a);{var o=_=>{var b=lt(),A=u(b),z=u(A,!0);f(A),f(b),j(()=>de(z,e(P))),E(_,b)},q=_=>{var b=ce(),A=F(b);{var z=G=>{var ae=dt(),se=F(ae);{let w=v(()=>(e(r),i(()=>e(r).name||"Enterprise"))),D=v(()=>(e(r),i(()=>e(r).endpoint?.name))),Ne=v(()=>(le(fe),i(()=>fe("github"))));st(se,{get title(){return e(w)},get subtitle(){return`Endpoint: ${e(D)??""} • GitHub Enterprise`},get forgeIcon(){return e(Ne)},onEdit:()=>s(T,!0),onDelete:()=>s(M,!0)})}var ne=d(se,2);tt(ne,{get entity(){return e(r)},entityType:"enterprise"});var oe=d(ne,2);{let 
w=v(()=>(e(r),i(()=>e(r).id||""))),D=v(()=>(e(r),i(()=>e(r).name||"")));rt(oe,{get pools(){return e(c)},entityType:"enterprise",get entityId(){return e(w)},get entityName(){return e(D)},$$events:{addPool:$e}})}var ie=d(oe,2);nt(ie,{get instances(){return e(m)},entityType:"enterprise",onDeleteInstance:xe});var He=d(ie,2);{let w=v(()=>(e(r),i(()=>e(r)?.events)));at(He,{get events(){return e(w)},get eventsContainer(){return e(h)},set eventsContainer(D){s(h,D)},$$legacy:!0})}E(G,ae)};g(A,G=>{e(r)&&G(z)},!0)}E(_,b)};g(n,_=>{e(P)?_(o):_(q,!1)},!0)}E(t,a)};g(Ce,t=>{e(U)?t(ke):t(Ae,!1)})}f(L);var ee=d(L,2);{var Fe=t=>{et(t,{get entity(){return e(r)},entityType:"enterprise",$$events:{close:()=>s(T,!1),submit:a=>_e(a.detail)}})};g(ee,t=>{e(T)&&e(r)&&t(Fe)})}var te=d(ee,2);{var Be=t=>{ue(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(r),i(()=>e(r).name)},$$events:{close:()=>s(M,!1),confirm:be}})};g(te,t=>{e(M)&&e(r)&&t(Be)})}var re=d(te,2);{var Se=t=>{ue(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),i(()=>e(p).name)},$$events:{close:()=>{s(I,!1),s(p,null)},confirm:Ee}})};g(re,t=>{e(I)&&e(p)&&t(Se)})}var Ue=d(re,2);{var Le=t=>{{let a=v(()=>(e(r),i(()=>e(r).id||"")));ot(t,{initialEntityType:"enterprise",get initialEntityId(){return e(a)},$$events:{close:()=>s(C,!1),submit:Ie}})}};g(Ue,t=>{e(C)&&e(r)&&t(Le)})}j(t=>{Ye(Te,"href",t),de(Me,(e(r),i(()=>e(r)?e(r).name:"Loading...")))},[()=>(le(B),i(()=>B("/enterprises")))]),E(me,O),Je(),ye()}export{Pt as component}; diff --git a/webapp/assets/_app/immutable/nodes/7.CzX45v88.js b/webapp/assets/_app/immutable/nodes/7.CzX45v88.js deleted file mode 100644 index f589ffea..00000000 --- a/webapp/assets/_app/immutable/nodes/7.CzX45v88.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Ye}from"../chunks/zNh6Oe5P.js";import{p as Ze,o as er,l as A,a as rr,f as m,h as ar,t as re,g as e,e as tr,c as n,d as dr,m as i,$ as lr,j as a,u,n as $e,k as d,s,r as t,z as L,C as sr,b as or,D as ir,v as Ue,q as nr}from"../chunks/sWNKMed7.js";import{i as p,s as vr,a as mr}from"../chunks/Ccl3fNd2.js";import{B as ze,d as y,c as Me,s as N,r as P}from"../chunks/DVl4ZBgx.js";import{b as q}from"../chunks/CLagxtgo.js";import{p as cr}from"../chunks/D4Caz1gY.js";import{g as W}from"../chunks/CPCsbdkz.js";import{a as ur,b as pr}from"../chunks/CVQRp8zk.js";import{t as br}from"../chunks/BZUCTtPY.js";import{e as gr}from"../chunks/BZiHL9L3.js";var fr=m('

                Username is required

                '),xr=m('

                Please enter a valid email address

                '),hr=m('

                Full name is required

                '),yr=m('

                Password must be at least 8 characters long

                '),kr=m('

                Passwords do not match

                '),_r=sr(' Advanced Configuration (Optional)',1),wr=m('

                URL where runners can fetch metadata and setup information.

                URL where runners send status updates and lifecycle events.

                URL where GitHub/Gitea will send webhook events for job notifications.

                '),$r=m("
              • Enter a username
              • "),Ur=m("
              • Enter a valid email address
              • "),zr=m("
              • Enter your full name
              • "),Mr=m("
              • Enter a password with at least 8 characters
              • "),Rr=m("
              • Confirm your password
              • "),Ar=m('

                Please complete all required fields

                '),Pr=m('

                '),qr=m('
                GARM

                Welcome to GARM

                Complete the first-run setup to get started

                First-Run Initialization

                GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation.

                This will create the admin user, generate a unique controller ID, and configure the required URLs for your GARM installation.
                Make sure to remember these credentials as they cannot be recovered.

                ');function Dr(Re,Ae){Ze(Ae,!1);const[Pe,qe]=vr(),C=()=>mr(ur,"$authStore",Pe),_=i(),w=i(),$=i(),U=i(),z=i(),S=i();let f=i("admin"),b=i("admin@garm.local"),c=i(""),x=i(""),h=i("Administrator"),j=i(!1),H=i(""),T=i(!1),I=i(""),E=i(""),V=i("");async function Ce(){if(e(S))try{s(j,!0),s(H,""),await pr.initialize(e(f).trim(),e(b).trim(),e(c),e(h).trim(),{callbackUrl:e(I).trim()||void 0,metadataUrl:e(E).trim()||void 0,webhookUrl:e(V).trim()||void 0}),br.success("GARM Initialized","GARM has been successfully initialized. Welcome!"),W(y("/"))}catch(r){s(H,gr(r))}finally{s(j,!1)}}er(()=>{if(C().isAuthenticated){W(y("/"));return}!C().needsInitialization&&!C().loading&&W(y("/login"))}),A(()=>(e(I),e(E),e(V)),()=>{if(typeof window<"u"){const r=window.location.origin;e(I)||s(I,`${r}/api/v1/callbacks`),e(E)||s(E,`${r}/api/v1/metadata`),e(V)||s(V,`${r}/webhooks`)}}),A(()=>e(b),()=>{s(_,e(b).trim()!==""&&e(b).includes("@"))}),A(()=>e(c),()=>{s(w,e(c).length>=8)}),A(()=>(e(x),e(c)),()=>{s($,e(x).length>0&&e(c)===e(x))}),A(()=>e(f),()=>{s(U,e(f).trim()!=="")}),A(()=>e(h),()=>{s(z,e(h).trim()!=="")}),A(()=>(e(U),e(_),e(z),e(w),e($)),()=>{s(S,e(U)&&e(_)&&e(z)&&e(w)&&e($))}),A(()=>(C(),y),()=>{C().isAuthenticated?W(y("/")):!C().needsInitialization&&!C().loading&&W(y("/login"))}),rr(),Ye();var ae=qr();ar(r=>{lr.title="Initialize GARM - First Run Setup"});var te=a(ae),me=a(te),ce=a(me),Ge=d(ce,2);t(me),L(4),t(te);var ue=d(te,2),pe=d(a(ue),2),de=a(pe),le=a(de),be=d(a(le),2),O=a(be);P(O);var Le=d(O,2);{var Ie=r=>{var l=fr();n(r,l)};p(Le,r=>{e(U),e(f),u(()=>!e(U)&&e(f).length>0)&&r(Ie)})}t(be),t(le);var se=d(le,2),ge=d(a(se),2),J=a(ge);P(J);var Ee=d(J,2);{var Ve=r=>{var l=xr();n(r,l)};p(Ee,r=>{e(_),e(b),u(()=>!e(_)&&e(b).length>0)&&r(Ve)})}t(ge),t(se);var oe=d(se,2),fe=d(a(oe),2),K=a(fe);P(K);var Be=d(K,2);{var Ne=r=>{var l=hr();n(r,l)};p(Be,r=>{e(z),e(h),u(()=>!e(z)&&e(h).length>0)&&r(Ne)})}t(fe),t(oe);var ie=d(oe,2),xe=d(a(ie),2),Q=a(xe);P(Q);var Se=d(Q,2);{var je=r=>{var l=yr();n(r,l)};p(Se,r=>{e(w),e(c),u(()=>!e(w)&&e(c).length>0)&&r(je)})}t(xe),t(ie);var ne=d(ie,2),he=d(a(ne),2),X=a(he);P(X);var Fe=d(X,2);{var De=r=>{var l=kr();n(r,l)};p(Fe,r=>{e($),e(x),u(()=>!e($)&&e(x).length>0)&&r(De)})}t(he),t(ne);var ve=d(ne,2),ye=a(ve);ze(ye,{type:"button",variant:"ghost",size:"sm",$$events:{click:()=>s(T,!e(T))},children:(r,l)=>{var g=_r(),v=or(g);L(),re(()=>N(v,0,`w-4 h-4 mr-2 transition-transform ${e(T)?"rotate-90":""}`)),n(r,g)},$$slots:{default:!0}});var We=d(ye,2);{var He=r=>{var l=wr(),g=a(l),v=a(g),M=d(a(v),2),G=a(M);P(G),L(2),t(M),t(v);var B=d(v,2),Y=d(a(B),2),F=a(Y);P(F),L(2),t(Y),t(B);var Z=d(B,2),D=d(a(Z),2),ee=a(D);P(ee),L(2),t(D),t(Z),t(g),t(l),q(G,()=>e(E),R=>s(E,R)),q(F,()=>e(I),R=>s(I,R)),q(ee,()=>e(V),R=>s(V,R)),n(r,l)};p(We,r=>{e(T)&&r(He)})}t(ve);var ke=d(ve,2);{var Te=r=>{var l=Ar(),g=a(l),v=d(a(g),2),M=d(a(v),2),G=a(M),B=a(G);{var Y=o=>{var k=$r();n(o,k)};p(B,o=>{e(U)||o(Y)})}var F=d(B,2);{var Z=o=>{var k=Ur();n(o,k)};p(F,o=>{e(_)||o(Z)})}var D=d(F,2);{var ee=o=>{var k=zr();n(o,k)};p(D,o=>{e(z)||o(ee)})}var R=d(D,2);{var Ke=o=>{var k=Mr();n(o,k)};p(R,o=>{e(w)||o(Ke)})}var Qe=d(R,2);{var Xe=o=>{var k=Rr();n(o,k)};p(Qe,o=>{e($)||o(Xe)})}t(G),t(M),t(v),t(g),t(l),n(r,l)};p(ke,r=>{e(S),e(f),e(b),e(h),e(c),e(x),u(()=>!e(S)&&(e(f).length>0||e(b).length>0||e(h).length>0||e(c).length>0||e(x).length>0))&&r(Te)})}var _e=d(ke,2);{var Oe=r=>{var l=Pr(),g=a(l),v=d(a(g),2),M=a(v),G=a(M,!0);t(M),t(v),t(g),t(l),re(()=>Ue(G,e(H))),n(r,l)};p(_e,r=>{e(H)&&r(Oe)})}var we=d(_e,2),Je=a(we);{let 
r=nr(()=>!e(S)||e(j));ze(Je,{type:"submit",variant:"primary",size:"lg",fullWidth:!0,get loading(){return e(j)},get disabled(){return e(r)},children:(l,g)=>{L();var v=ir();re(()=>Ue(v,e(j)?"Initializing...":"Initialize GARM")),n(l,v)},$$slots:{default:!0}})}t(we),t(de),L(2),t(pe),t(ue),t(ae),re((r,l)=>{Me(ce,"src",r),Me(Ge,"src",l),N(O,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(U),e(f),u(()=>!e(U)&&e(f).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(J,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(_),e(b),u(()=>!e(_)&&e(b).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(K,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(z),e(h),u(()=>!e(z)&&e(h).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(Q,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(w),e(c),u(()=>!e(w)&&e(c).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(X,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e($),e(x),u(()=>!e($)&&e(x).length>0?"border-red-300 dark:border-red-600":"")??""}`)},[()=>($e(y),u(()=>y("/assets/garm-light.svg"))),()=>($e(y),u(()=>y("/assets/garm-dark.svg")))]),q(O,()=>e(f),r=>s(f,r)),q(J,()=>e(b),r=>s(b,r)),q(K,()=>e(h),r=>s(h,r)),q(Q,()=>e(c),r=>s(c,r)),q(X,()=>e(x),r=>s(x,r)),tr("submit",de,cr(Ce)),n(Re,ae),dr(),qe()}export{Dr as component}; diff --git a/webapp/assets/_app/immutable/nodes/8.BUYkNug1.js b/webapp/assets/_app/immutable/nodes/8.BUYkNug1.js deleted file mode 100644 index d470cee3..00000000 --- a/webapp/assets/_app/immutable/nodes/8.BUYkNug1.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as z}from"../chunks/zNh6Oe5P.js";import{p as J,f as C,c as x,d as K,j as y,n as f,u as g,r as b,t as M,v as R,B as ce,b as O,o as de,A as ue,l as w,a as me,h as pe,g as e,m as u,$ as fe,q as ge,k as T,s}from"../chunks/sWNKMed7.js";import{p as ve,i as A}from"../chunks/Ccl3fNd2.js";import{d as D,c as $,g as H}from"../chunks/DVl4ZBgx.js";import{D as he}from"../chunks/kJjQhR0J.js";import{P as _e}from"../chunks/qfys27k5.js";import{w as ye}from"../chunks/BuuPrWMc.js";import{t as be}from"../chunks/BZUCTtPY.js";import{D as xe,G as Ce,a as Pe}from"../chunks/DCYYzf48.js";import{e as ke}from"../chunks/BZiHL9L3.js";import{E as Ie}from"../chunks/DAWfW-VQ.js";import"../chunks/C6O4o7G1.js";import{S as W}from"../chunks/BJXodF8n.js";var Se=C(' '),we=C(' '),Te=C('-'),De=C('
                ');function $e(E,k){J(k,!1);let a=ve(k,"item",8);z();var m=De(),I=y(m);{var i=r=>{var p=Se(),h=y(p);b(p),M(n=>{$(p,"href",n),$(p,"title",`Pool: ${f(a()),g(()=>a().pool_id)??""}`),R(h,`Pool: ${f(a()),g(()=>a().pool_id)??""}`)},[()=>(f(D),f(a()),g(()=>D(`/pools/${a().pool_id}`)))]),x(r,p)},P=r=>{var p=ce(),h=O(p);{var n=l=>{var o=we(),c=y(o);b(o),M(S=>{$(o,"href",S),$(o,"title",`Scale Set: ${f(a()),g(()=>a().scale_set_id)??""}`),R(c,`Scale Set: ${f(a()),g(()=>a().scale_set_id)??""}`)},[()=>(f(D),f(a()),g(()=>D(`/scalesets/${a().scale_set_id}`)))]),x(l,o)},v=l=>{var o=Te();x(l,o)};A(h,l=>{f(a()),g(()=>a()?.scale_set_id)?l(n):l(v,!1)},!0)}x(r,p)};A(I,r=>{f(a()),g(()=>a()?.pool_id)?r(i):r(P,!1)})}b(m),x(E,m),K()}var Ae=C('

                Error

                '),Ee=C('
                ',1);function Oe(E,k){J(k,!1);const a=u(),m=u(),I=u();let i=u([]),P=u(!0),r=u(""),p="",h=null,n=u(1),v=u(25),l=u(""),o=u(!1),c=u(null);async function S(){try{s(P,!0),s(r,""),s(i,await H.listInstances())}catch(t){s(r,t instanceof Error?t.message:"Failed to load instances")}finally{s(P,!1)}}function F(t){s(c,t),s(o,!0)}async function Q(){if(e(c))try{await H.deleteInstance(e(c).name),be.success("Instance Deleted",`Instance ${e(c).name} has been deleted successfully.`)}catch(t){s(r,ke(t))}finally{s(o,!1),s(c,null)}}const U=[{key:"name",title:"Name",cellComponent:Ie,cellProps:{entityType:"instance",showId:!0}},{key:"pool_scale_set",title:"Pool/Scale Set",flexible:!0,cellComponent:$e},{key:"created",title:"Created",cellComponent:Ce,cellProps:{field:"created_at",type:"date"}},{key:"status",title:"Status",cellComponent:W,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:W,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"actions",title:"Actions",align:"right",cellComponent:Pe,cellProps:{actions:[{type:"delete",title:"Delete",ariaLabel:"Delete instance",action:"delete"}]}}],V={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"},{type:"status",field:"runner_status"}],actions:[{type:"delete",handler:t=>F(t)}]};function X(t){s(l,t.detail.term),s(n,1)}function Y(t){s(n,t.detail.page)}function Z(t){s(v,t.detail.perPage),s(n,1)}async function ee(){try{await S()}catch(t){console.error("Retry failed:",t)}}function te(t){}function ae(t){F(t.detail.item)}function se(t){if(t.operation==="create"){const d=t.payload;s(i,[...e(i),d])}else if(t.operation==="update"){const d=t.payload;s(i,e(i).map(_=>_.name===d.name?d:_))}else if(t.operation==="delete"){const d=t.payload.name||t.payload;s(i,e(i).filter(_=>_.name!==d))}}de(()=>{S(),h=ye.subscribeToEntity("instance",["create","update","delete"],se)}),ue(()=>{h&&(h(),h=null)}),w(()=>(e(i),e(l)),()=>{s(a,e(i).filter(t=>(e(l)===""||t.name?.toLowerCase().includes(e(l).toLowerCase())||t.provider_id?.toLowerCase().includes(e(l).toLowerCase()))&&p===""))}),w(()=>(e(a),e(v)),()=>{s(m,Math.ceil(e(a).length/e(v)))}),w(()=>(e(n),e(m)),()=>{e(n)>e(m)&&e(m)>0&&s(n,e(m))}),w(()=>(e(a),e(n),e(v)),()=>{s(I,e(a).slice((e(n)-1)*e(v),e(n)*e(v)))}),me(),z();var N=Ee();pe(t=>{fe.title="Instances - GARM"});var L=O(N),G=y(L);_e(G,{title:"Runner Instances",description:"Monitor your running instances",showAction:!1});var q=T(G,2);{var re=t=>{var d=Ae(),_=y(d),j=y(_),B=T(y(j),2),ie=y(B,!0);b(B),b(j),b(_),b(d),M(()=>R(ie,e(r))),x(t,d)};A(q,t=>{e(r)&&t(re)})}var ne=T(q,2);{let t=ge(()=>!!e(r));xe(ne,{get columns(){return U},get data(){return e(I)},get loading(){return e(P)},get error(){return e(r)},get searchTerm(){return e(l)},searchPlaceholder:"Search instances...",get currentPage(){return e(n)},get perPage(){return e(v)},get totalPages(){return e(m)},get totalItems(){return e(a),g(()=>e(a).length)},itemName:"instances",emptyIconType:"cog",get showRetry(){return e(t)},get mobileCardConfig(){return V},$$events:{search:X,pageChange:Y,perPageChange:Z,retry:ee,edit:te,delete:ae}})}b(L);var le=T(L,2);{var oe=t=>{he(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(c),g(()=>e(c).name)},$$events:{close:()=>{s(o,!1),s(c,null)},confirm:Q}})};A(le,t=>{e(o)&&e(c)&&t(oe)})}x(E,N),K()}export{Oe as component}; diff --git a/webapp/assets/_app/immutable/nodes/9.D2F8Tae4.js b/webapp/assets/_app/immutable/nodes/9.D2F8Tae4.js deleted file mode 100644 index 6e8fbfe9..00000000 --- a/webapp/assets/_app/immutable/nodes/9.D2F8Tae4.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as $e}from"../chunks/zNh6Oe5P.js";import{p as Ue,o as Le,A as Oe,l as Pe,a as Ve,f as u,h as ze,b as P,t as f,c as v,d as Re,u as r,n as p,v as m,g as t,m as A,j as s,k as d,s as h,$ as qe,r as a,B as Tt,e as Fe,q as $t,D as Ut}from"../chunks/sWNKMed7.js";import{i as g,s as Ge,a as He}from"../chunks/Ccl3fNd2.js";import{w as We,e as Lt,i as Ot}from"../chunks/BuuPrWMc.js";import{d as j,c as nt,g as Pt,s as Vt}from"../chunks/DVl4ZBgx.js";import{b as Je}from"../chunks/_gFYyMUN.js";import{p as Ke}from"../chunks/CJwphPxi.js";import{g as zt}from"../chunks/CPCsbdkz.js";import{D as Qe}from"../chunks/kJjQhR0J.js";import{g as V,f as z}from"../chunks/ow_oMtSd.js";import{s as Rt,b as B,B as qt,d as Ft}from"../chunks/cjRLNre3.js";import{e as Xe}from"../chunks/BZiHL9L3.js";var Ye=u('

                Error

                '),Ze=u('

                Loading instance details...

                '),ta=u(' '),ea=u(' '),aa=u('-'),sa=u('
                Updated At:
                '),ra=u('
                '),da=u('
                Network Addresses:
                '),ia=u('
                Network Addresses:
                No addresses available
                '),na=u('
                OS Type:
                '),la=u('
                OS Name:
                '),oa=u('
                OS Version:
                '),va=u('
                OS Architecture:
                '),xa=u('

                '),ca=u('

                Status Messages

                '),ma=u('

                Status Messages

                No status messages available

                '),ua=u('

                Instance Information

                ID:
                Name:
                Provider ID:
                Provider:
                Pool/Scale Set:
                Agent ID:
                Created At:

                Status & Network

                Instance Status:
                Runner Status:
                ',1),ga=u('
                Instance not found.
                '),_a=u(' ',1);function Ba(Gt,Ht){Ue(Ht,!1);const[Wt,Jt]=Ge(),lt=()=>He(Ke,"$page",Wt),R=A();let e=A(null),q=A(!0),N=A(""),E=A(!1),T=null,$=A();async function Kt(){if(t(R))try{h(q,!0),h(N,""),h(e,await Pt.getInstance(t(R)))}catch(l){h(N,l instanceof Error?l.message:"Failed to load instance")}finally{h(q,!1)}}async function Qt(){if(t(e)){try{await Pt.deleteInstance(t(e).name),zt(j("/instances"))}catch(l){h(N,Xe(l))}h(E,!1)}}function Xt(l){if(t(e))if(l.operation==="update"&&l.payload.id===t(e).id){const b=t(e).status_messages?.length||0,D={...t(e),...l.payload},M=D.status_messages?.length||0;h(e,D),M>b&&setTimeout(()=>{Rt(t($))},100)}else l.operation==="delete"&&(l.payload.id||l.payload)===t(e).id&&zt(j("/instances"))}Le(()=>{Kt().then(()=>{t(e)?.status_messages?.length&&setTimeout(()=>{Rt(t($))},100)}),T=We.subscribeToEntity("instance",["update","delete"],Xt)}),Oe(()=>{T&&(T(),T=null)}),Pe(()=>lt(),()=>{h(R,decodeURIComponent(lt().params.id||""))}),Ve(),$e();var ot=_a();ze(l=>{f(()=>qe.title=`${t(e),r(()=>t(e)?`${t(e).name} - Instance Details`:"Instance Details")??""} - GARM`)});var F=P(ot),G=s(F),vt=s(G),H=s(vt),Yt=s(H);a(H);var xt=d(H,2),ct=s(xt),mt=d(s(ct),2),Zt=s(mt,!0);a(mt),a(ct),a(xt),a(vt),a(G);var ut=d(G,2);{var te=l=>{var b=Ye(),D=s(b),M=s(D),U=d(s(M),2),I=s(U,!0);a(U),a(M),a(D),a(b),f(()=>m(I,t(N))),v(l,b)};g(ut,l=>{t(N)&&l(te)})}var ee=d(ut,2);{var ae=l=>{var b=Ze();v(l,b)},se=l=>{var b=Tt(),D=P(b);{var M=I=>{var L=ua(),W=P(L),J=s(W),K=s(J),gt=d(s(K),2),ie=s(gt);a(gt),a(K);var _t=d(K,2),Q=s(_t),ft=d(s(Q),2),ne=s(ft,!0);a(ft),a(Q);var X=d(Q,2),yt=d(s(X),2),le=s(yt,!0);a(yt),a(X);var Y=d(X,2),pt=d(s(Y),2),oe=s(pt,!0);a(pt),a(Y);var Z=d(Y,2),ht=d(s(Z),2),ve=s(ht,!0);a(ht),a(Z);var tt=d(Z,2),bt=d(s(tt),2),xe=s(bt);{var ce=i=>{var n=ta(),o=s(n,!0);a(n),f(x=>{nt(n,"href",x),m(o,(t(e),r(()=>t(e).pool_id)))},[()=>(p(j),t(e),r(()=>j(`/pools/${t(e).pool_id}`)))]),v(i,n)},me=i=>{var n=Tt(),o=P(n);{var x=_=>{var y=ea(),S=s(y,!0);a(y),f(C=>{nt(y,"href",C),m(S,(t(e),r(()=>t(e).scale_set_id)))},[()=>(p(j),t(e),r(()=>j(`/scalesets/${t(e).scale_set_id}`)))]),v(_,y)},c=_=>{var y=aa();v(_,y)};g(o,_=>{t(e),r(()=>t(e).scale_set_id)?_(x):_(c,!1)},!0)}v(i,n)};g(xe,i=>{t(e),r(()=>t(e).pool_id)?i(ce):i(me,!1)})}a(bt),a(tt);var et=d(tt,2),kt=d(s(et),2),ue=s(kt,!0);a(kt),a(et);var at=d(et,2),wt=d(s(at),2),ge=s(wt,!0);a(wt),a(at);var _e=d(at,2);{var fe=i=>{var n=sa(),o=d(s(n),2),x=s(o,!0);a(o),a(n),f(c=>m(x,c),[()=>(p(B),t(e),r(()=>B(t(e).updated_at)))]),v(i,n)};g(_e,i=>{t(e),r(()=>t(e).updated_at&&t(e).updated_at!==t(e).created_at)&&i(fe)})}a(_t),a(J);var It=d(J,2),jt=d(s(It),2),st=s(jt),Dt=d(s(st),2),rt=s(Dt),ye=s(rt,!0);a(rt),a(Dt),a(st);var dt=d(st,2),St=d(s(dt),2),it=s(St),pe=s(it,!0);a(it),a(St),a(dt);var Mt=d(dt,2);{var he=i=>{var n=da(),o=d(s(n),2);Lt(o,5,()=>(t(e),r(()=>t(e).addresses)),Ot,(x,c)=>{var _=ra(),y=s(_),S=s(y,!0);a(y);var C=d(y,2);{let O=$t(()=>(t(c),r(()=>t(c).type||"Unknown")));qt(C,{variant:"info",get text(){return t(O)}})}a(_),f(()=>m(S,(t(c),r(()=>t(c).address)))),v(x,_)}),a(o),a(n),v(i,n)},be=i=>{var n=ia();v(i,n)};g(Mt,i=>{t(e),r(()=>t(e).addresses&&t(e).addresses.length>0)?i(he):i(be,!1)})}var At=d(Mt,2);{var ke=i=>{var n=na(),o=d(s(n),2),x=s(o,!0);a(o),a(n),f(()=>m(x,(t(e),r(()=>t(e).os_type)))),v(i,n)};g(At,i=>{t(e),r(()=>t(e).os_type)&&i(ke)})}var Bt=d(At,2);{var we=i=>{var n=la(),o=d(s(n),2),x=s(o,!0);a(o),a(n),f(()=>m(x,(t(e),r(()=>t(e).os_name)))),v(i,n)};g(Bt,i=>{t(e),r(()=>t(e).os_name)&&i(we)})}var Nt=d(Bt,2);{var Ie=i=>{var 
n=oa(),o=d(s(n),2),x=s(o,!0);a(o),a(n),f(()=>m(x,(t(e),r(()=>t(e).os_version)))),v(i,n)};g(Nt,i=>{t(e),r(()=>t(e).os_version)&&i(Ie)})}var je=d(Nt,2);{var De=i=>{var n=va(),o=d(s(n),2),x=s(o,!0);a(o),a(n),f(()=>m(x,(t(e),r(()=>t(e).os_arch)))),v(i,n)};g(je,i=>{t(e),r(()=>t(e).os_arch)&&i(De)})}a(jt),a(It),a(W);var Se=d(W,2);{var Me=i=>{var n=ca(),o=d(s(n),2);Lt(o,5,()=>(t(e),r(()=>t(e).status_messages)),Ot,(x,c)=>{var _=xa(),y=s(_),S=s(y),C=s(S,!0);a(S);var O=d(S,2),Ct=s(O);{var Be=k=>{const w=$t(()=>(p(Ft),t(c),r(()=>Ft(t(c).event_level))));qt(k,{get variant(){return p(t(w)),r(()=>t(w).variant)},get text(){return p(t(w)),r(()=>t(w).text)}})};g(Ct,k=>{t(c),r(()=>t(c).event_level)&&k(Be)})}var Et=d(Ct,2),Ne=s(Et);{var Ce=k=>{var w=Ut();f(Te=>m(w,Te),[()=>(p(B),t(c),r(()=>B(t(c).created_at)))]),v(k,w)},Ee=k=>{var w=Ut("Unknown date");v(k,w)};g(Ne,k=>{t(c),r(()=>t(c).created_at)?k(Ce):k(Ee,!1)})}a(Et),a(O),a(y),a(_),f(()=>m(C,(t(c),r(()=>t(c).message)))),v(x,_)}),a(o),Je(o,x=>h($,x),()=>t($)),a(n),v(i,n)},Ae=i=>{var n=ma();v(i,n)};g(Se,i=>{t(e),r(()=>t(e).status_messages&&t(e).status_messages.length>0)?i(Me):i(Ae,!1)})}f((i,n,o,x,c)=>{m(ne,(t(e),r(()=>t(e).id))),m(le,(t(e),r(()=>t(e).name))),m(oe,(t(e),r(()=>t(e).provider_id))),m(ve,(t(e),r(()=>t(e).provider_name||"Unknown"))),m(ue,(t(e),r(()=>t(e).agent_id||"Not assigned"))),m(ge,i),Vt(rt,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${n??""}`),m(ye,o),Vt(it,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${x??""}`),m(pe,c)},[()=>(p(B),t(e),r(()=>B(t(e).created_at))),()=>(p(V),t(e),r(()=>V(t(e).status||"unknown"))),()=>(p(z),t(e),r(()=>z(t(e).status||"unknown"))),()=>(p(V),t(e),r(()=>V(t(e).runner_status||"unknown"))),()=>(p(z),t(e),r(()=>z(t(e).runner_status||"unknown")))]),Fe("click",ie,()=>h(E,!0)),v(I,L)},U=I=>{var L=ga();v(I,L)};g(D,I=>{t(e)?I(M):I(U,!1)},!0)}v(l,b)};g(ee,l=>{t(q)?l(ae):l(se,!1)})}a(F);var re=d(F,2);{var de=l=>{Qe(l,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
diff --git a/webapp/assets/_app/version.json b/webapp/assets/_app/version.json
deleted file mode 100644
index 41293260..00000000
--- a/webapp/assets/_app/version.json
+++ /dev/null
@@ -1 +0,0 @@
-{"version":"1756138203019"}
\ No newline at end of file
diff --git a/webapp/assets/assets.go b/webapp/assets/assets.go
deleted file mode 100644
index 613baa0d..00000000
--- a/webapp/assets/assets.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package assets
-
-import (
-	"embed"
-	"net/http"
-	"path/filepath"
-	"strings"
-)
-
-//go:generate go run github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 generate spec --output=../swagger.yaml --scan-models --work-dir=../../
-//go:generate go run github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 validate ../swagger.yaml
-//go:generate rm -rf ../src/lib/api/generated
-//go:generate openapi-generator-cli generate --skip-validate-spec -i ../swagger.yaml -g typescript-axios -o ../src/lib/api/generated
-
-//go:embed all:*
-var EmbeddedSPA embed.FS
-
-// GetSPAFileSystem returns the embedded SPA file system for use with http.FileServer
-func GetSPAFileSystem() http.FileSystem {
-	return http.FS(EmbeddedSPA)
-}
-
-// ServeSPA serves the embedded SPA with proper content types and SPA routing
-// This is kept for backward compatibility
-func ServeSPA(w http.ResponseWriter, r *http.Request) {
-	ServeSPAWithPath(w, r, "/ui/")
-}
-
-// ServeSPAWithPath serves the embedded SPA with a custom webapp path
-func ServeSPAWithPath(w http.ResponseWriter, r *http.Request, webappPath string) {
-	filename := strings.TrimPrefix(r.URL.Path, webappPath)
-
-	// Handle root path and SPA routing - serve index.html for all routes
-	if filename == "" || !strings.Contains(filename, ".") {
-		filename = "index.html"
-	}
-
-	// Security check - prevent directory traversal
-	if strings.Contains(filename, "..") {
-		http.NotFound(w, r)
-		return
-	}
-
-	// Read file from embedded filesystem
-	content, err := EmbeddedSPA.ReadFile(filename)
-	if err != nil {
-		// If file not found, serve index.html for SPA routing
-		content, err = EmbeddedSPA.ReadFile("index.html")
-		if err != nil {
-			http.NotFound(w, r)
-			return
-		}
-		filename = "index.html"
-	}
-
-	// Set appropriate content type based on file extension
-	ext := strings.ToLower(filepath.Ext(filename))
-	switch ext {
-	case ".html":
-		w.Header().Set("Content-Type", "text/html; charset=utf-8")
-	case ".js":
-		w.Header().Set("Content-Type", "application/javascript")
-	case ".css":
-		w.Header().Set("Content-Type", "text/css")
-	case ".json":
-		w.Header().Set("Content-Type", "application/json")
-	case ".svg":
-		w.Header().Set("Content-Type", "image/svg+xml")
-	case ".png":
-		w.Header().Set("Content-Type", "image/png")
-	default:
-		w.Header().Set("Content-Type", "text/plain")
-	}
-
-	// Set cache headers for static assets (but not for HTML to ensure fresh content)
-	if ext != ".html" {
-		w.Header().Set("Cache-Control", "public, max-age=3600")
-	} else {
-		w.Header().Set("Cache-Control", "no-cache, must-revalidate")
-	}
-
-	w.Write(content)
-}
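For context on what the deleted assets.go provided: a handler like ServeSPAWithPath is typically mounted under the UI prefix of the HTTP router. The following is a minimal sketch of that wiring, assuming the package was still importable at github.com/cloudbase/garm/webapp/assets (the import path is inferred from the file path above; the server setup is illustrative, not the exact wiring GARM used):

    package main

    import (
    	"log"
    	"net/http"

    	"github.com/cloudbase/garm/webapp/assets" // removed by this change; shown for illustration only
    )

    func main() {
    	mux := http.NewServeMux()

    	// All requests under /ui/ resolve against the embedded SPA. Paths
    	// without a file extension fall back to index.html, which is what
    	// lets client-side routes such as /ui/enterprises survive a reload.
    	mux.HandleFunc("/ui/", func(w http.ResponseWriter, r *http.Request) {
    		assets.ServeSPAWithPath(w, r, "/ui/")
    	})

    	log.Fatal(http.ListenAndServe(":8080", mux))
    }

Note the asymmetric caching in the removed handler: fingerprinted assets are served with "public, max-age=3600", while HTML is marked "no-cache, must-revalidate", so a redeployed UI is picked up on the next page load.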
diff --git a/webapp/assets/assets/garm-dark.svg b/webapp/assets/assets/garm-dark.svg
deleted file mode 100644
index f0a0c564..00000000
--- a/webapp/assets/assets/garm-dark.svg
+++ /dev/null
@@ -1,37 +0,0 @@
-[37 lines of SVG markup (dark-theme GARM logo); tags stripped during extraction, not reproduced]
diff --git a/webapp/assets/assets/garm-light.svg b/webapp/assets/assets/garm-light.svg
deleted file mode 100644
index 2495959d..00000000
--- a/webapp/assets/assets/garm-light.svg
+++ /dev/null
@@ -1,36 +0,0 @@
-[36 lines of SVG markup (light-theme GARM logo); tags stripped during extraction, not reproduced]
diff --git a/webapp/assets/assets/gitea.svg b/webapp/assets/assets/gitea.svg
deleted file mode 100644
index e4643ce3..00000000
--- a/webapp/assets/assets/gitea.svg
+++ /dev/null
@@ -1 +0,0 @@
-[one line of SVG markup (Gitea logo); tags stripped during extraction, not reproduced]
diff --git a/webapp/assets/assets/github-mark-white.svg b/webapp/assets/assets/github-mark-white.svg
deleted file mode 100644
index d5e64918..00000000
--- a/webapp/assets/assets/github-mark-white.svg
+++ /dev/null
@@ -1 +0,0 @@
-[one line of SVG markup (white GitHub mark); tags stripped during extraction, not reproduced]
\ No newline at end of file
diff --git a/webapp/assets/assets/github-mark.svg b/webapp/assets/assets/github-mark.svg
deleted file mode 100644
index 37fa923d..00000000
--- a/webapp/assets/assets/github-mark.svg
+++ /dev/null
@@ -1 +0,0 @@
-[one line of SVG markup (GitHub mark); tags stripped during extraction, not reproduced]
\ No newline at end of file
diff --git a/webapp/assets/favicon-dark.png b/webapp/assets/favicon-dark.png
deleted file mode 100644
index d16186d1..00000000
Binary files a/webapp/assets/favicon-dark.png and /dev/null differ
diff --git a/webapp/assets/favicon-light.png b/webapp/assets/favicon-light.png
deleted file mode 100644
index 5390c2f9..00000000
Binary files a/webapp/assets/favicon-light.png and /dev/null differ
diff --git a/webapp/assets/index.html b/webapp/assets/index.html
deleted file mode 100644
index e5cfb2f3..00000000
--- a/webapp/assets/index.html
+++ /dev/null
@@ -1,105 +0,0 @@
-[105 lines of SvelteKit SPA entry-point HTML (the index.html fallback target of ServeSPAWithPath); tags stripped during extraction, not reproduced]
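Since the index.html deleted above was the fallback target for every extensionless route, the handler's contract is easy to pin down with a stdlib-only test. A minimal sketch, valid only while the code removed in this diff still existed (the expected header values are taken verbatim from the deleted ServeSPAWithPath; the test itself is illustrative, not part of the GARM test suite):

    package assets_test

    import (
    	"net/http"
    	"net/http/httptest"
    	"testing"

    	"github.com/cloudbase/garm/webapp/assets" // removed by this change; shown for illustration only
    )

    // An extensionless client-side route must fall back to index.html,
    // served uncached so UI redeploys are picked up immediately.
    func TestSPAFallbackServesIndex(t *testing.T) {
    	req := httptest.NewRequest(http.MethodGet, "/ui/enterprises", nil)
    	rec := httptest.NewRecorder()

    	assets.ServeSPAWithPath(rec, req, "/ui/")

    	if got := rec.Header().Get("Content-Type"); got != "text/html; charset=utf-8" {
    		t.Fatalf("expected the HTML fallback, got Content-Type %q", got)
    	}
    	if got := rec.Header().Get("Cache-Control"); got != "no-cache, must-revalidate" {
    		t.Fatalf("expected no-cache for HTML, got %q", got)
    	}
    }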
diff --git a/webapp/assets/openapitools.json b/webapp/assets/openapitools.json
deleted file mode 100644
index a82623d6..00000000
--- a/webapp/assets/openapitools.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
-  "spaces": 2,
-  "generator-cli": {
-    "version": "7.14.0"
-  }
-}
diff --git a/webapp/openapitools.json b/webapp/openapitools.json
deleted file mode 100644
index a82623d6..00000000
--- a/webapp/openapitools.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
-  "spaces": 2,
-  "generator-cli": {
-    "version": "7.14.0"
-  }
-}
diff --git a/webapp/package-lock.json b/webapp/package-lock.json
deleted file mode 100644
index 90e63cd6..00000000
--- a/webapp/package-lock.json
+++ /dev/null
@@ -1,6983 +0,0 @@
-{
-  "name": "garm-webapp",
-  "version": "0.0.1",
-  "lockfileVersion": 3,
-  "requires": true,
-  "packages": {
-    "": {
-      "name": "garm-webapp",
-      "version": "0.0.1",
-      "license": "ISC",
-      "dependencies": {
-        "@codemirror/lang-json": "^6.0.2",
-        "@codemirror/state": "^6.5.2",
-        "@codemirror/theme-one-dark": "^6.1.3",
-        "@codemirror/view": "^6.38.1",
-        "@tailwindcss/typography": "^0.5.10",
-        "codemirror": "^6.0.2",
-        "cookie": "^0.7.0"
-      },
-      "devDependencies": {
-        "@openapitools/openapi-generator-cli": "^2.22.0",
-        "@playwright/test": "^1.54.2",
-        "@sveltejs/adapter-static": "^3.0.1",
-        "@sveltejs/kit": "^2.0.0",
-        "@sveltejs/vite-plugin-svelte": "^6.1.0",
-        "@tailwindcss/forms": "^0.5.7",
-        "@tailwindcss/postcss": "^4.1.11",
-        "@testing-library/jest-dom": "^6.7.0",
-        "@testing-library/svelte": "^5.2.0-next.3",
-        "@testing-library/user-event": "^14.6.1",
-        "@types/node": "^24.2.0",
-        "@vitest/ui": "^3.2.4",
-        "autoprefixer": "^10.4.16",
-        "happy-dom": "^18.0.1",
-        "jsdom": "^26.1.0",
-        "postcss": "^8.4.32",
-        "svelte": "^5.38.0",
-        "svelte-check": "^4.3.1",
-        "swagger-typescript-api": "^13.2.7",
-        "tailwindcss": "^4.1.11",
-        "typescript": "^5.0.0",
-        "vite": "^7.1.1",
-        "vitest": "^3.2.4"
-      }
-    },
-[remaining node_modules entries of the generated npm lockfile not reproduced; the hunk deletes 6,983 lines in total, and the source extract is itself truncated mid-file at this point]
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", - "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@lezer/highlight": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", - "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", - "license": "MIT", - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, - "node_modules/@lezer/highlight/node_modules/@lezer/common": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", - "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", - "license": "MIT" - }, - "node_modules/@lezer/json": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.3.tgz", - "integrity": "sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==", - "license": "MIT", - "dependencies": { - "@lezer/common": "^1.2.0", - "@lezer/highlight": "^1.0.0", - "@lezer/lr": "^1.0.0" - } - }, - "node_modules/@lezer/json/node_modules/@lezer/common": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", - "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", - "license": "MIT" - }, - "node_modules/@lezer/json/node_modules/@lezer/lr": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", - "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", - "license": "MIT", - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, - "node_modules/@lukeed/csprng": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", - "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@marijn/find-cluster-break": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", - "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", - "license": "MIT" - }, - "node_modules/@nestjs/axios": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@nestjs/axios/-/axios-4.0.1.tgz", - "integrity": "sha512-68pFJgu+/AZbWkGu65Z3r55bTsCPlgyKaV4BSG8yUAD72q1PPuyVRgUwFv6BxdnibTUHlyxm06FmYWNC+bjN7A==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@nestjs/common": "^10.0.0 || ^11.0.0", - "axios": "^1.3.1", - "rxjs": "^7.0.0" - } - }, - "node_modules/@nestjs/common": { - "version": "11.1.6", - "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.6.tgz", - "integrity": "sha512-krKwLLcFmeuKDqngG2N/RuZHCs2ycsKcxWIDgcm7i1lf3sQ0iG03ci+DsP/r3FcT/eJDFsIHnKtNta2LIi7PzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "file-type": "21.0.0", - "iterare": "1.2.1", - "load-esm": "1.0.2", - "tslib": "2.8.1", - "uid": "2.0.2" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/nest" - }, - "peerDependencies": { - 
"class-transformer": ">=0.4.1", - "class-validator": ">=0.13.2", - "reflect-metadata": "^0.1.12 || ^0.2.0", - "rxjs": "^7.1.0" - }, - "peerDependenciesMeta": { - "class-transformer": { - "optional": true - }, - "class-validator": { - "optional": true - } - } - }, - "node_modules/@nestjs/common/node_modules/@tokenizer/inflate": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz", - "integrity": "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "fflate": "^0.8.2", - "token-types": "^6.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, - "node_modules/@nestjs/common/node_modules/file-type": { - "version": "21.0.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.0.0.tgz", - "integrity": "sha512-ek5xNX2YBYlXhiUXui3D/BXa3LdqPmoLJ7rqEx2bKJ7EAUEfmXgW0Das7Dc6Nr9MvqaOnIqiPV0mZk/r/UpNAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tokenizer/inflate": "^0.2.7", - "strtok3": "^10.2.2", - "token-types": "^6.0.0", - "uint8array-extras": "^1.4.0" - }, - "engines": { - "node": ">=20" - }, - "funding": { - "url": "https://github.com/sindresorhus/file-type?sponsor=1" - } - }, - "node_modules/@nestjs/common/node_modules/strtok3": { - "version": "10.3.4", - "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-10.3.4.tgz", - "integrity": "sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tokenizer/token": "^0.3.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, - "node_modules/@nestjs/core": { - "version": "11.1.6", - "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-11.1.6.tgz", - "integrity": "sha512-siWX7UDgErisW18VTeJA+x+/tpNZrJewjTBsRPF3JVxuWRuAB1kRoiJcxHgln8Lb5UY9NdvklITR84DUEXD0Cg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "@nuxt/opencollective": "0.4.1", - "fast-safe-stringify": "2.1.1", - "iterare": "1.2.1", - "path-to-regexp": "8.2.0", - "tslib": "2.8.1", - "uid": "2.0.2" - }, - "engines": { - "node": ">= 20" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/nest" - }, - "peerDependencies": { - "@nestjs/common": "^11.0.0", - "@nestjs/microservices": "^11.0.0", - "@nestjs/platform-express": "^11.0.0", - "@nestjs/websockets": "^11.0.0", - "reflect-metadata": "^0.1.12 || ^0.2.0", - "rxjs": "^7.1.0" - }, - "peerDependenciesMeta": { - "@nestjs/microservices": { - "optional": true - }, - "@nestjs/platform-express": { - "optional": true - }, - "@nestjs/websockets": { - "optional": true - } - } - }, - "node_modules/@nuxt/opencollective": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz", - "integrity": "sha512-GXD3wy50qYbxCJ652bDrDzgMr3NFEkIS374+IgFQKkCvk9yiYcLvX2XDYr7UyQxf4wK0e+yqDYRubZ0DtOxnmQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "consola": "^3.2.3" - }, - "bin": { - "opencollective": "bin/opencollective.js" - }, - "engines": { - "node": "^14.18.0 || >=16.10.0", - "npm": ">=5.10.0" - } - }, - "node_modules/@nuxt/opencollective/node_modules/consola": { - "version": "3.4.2", - "resolved": 
"https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", - "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/@nuxtjs/opencollective": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz", - "integrity": "sha512-um0xL3fO7Mf4fDxcqx9KryrB7zgRM5JSlvGN5AGkP6JLM5XEKyjeAiPbNxdXVXQ16isuAhYpvP88NgL2BGd6aA==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.0", - "consola": "^2.15.0", - "node-fetch": "^2.6.1" - }, - "bin": { - "opencollective": "bin/opencollective.js" - }, - "engines": { - "node": ">=8.0.0", - "npm": ">=5.0.0" - } - }, - "node_modules/@nuxtjs/opencollective/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/@openapitools/openapi-generator-cli": { - "version": "2.22.0", - "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.22.0.tgz", - "integrity": "sha512-HdjSiKsXpbnXBcSCnft494fv5pFZxPKFAV1eR+yMjo3bt1ONLb7OGy1D/5SrbjRfy9b82JcYUJ3gssh49suWKg==", - "dev": true, - "hasInstallScript": true, - "license": "Apache-2.0", - "dependencies": { - "@nestjs/axios": "4.0.1", - "@nestjs/common": "11.1.6", - "@nestjs/core": "11.1.6", - "@nuxtjs/opencollective": "0.3.2", - "axios": "1.11.0", - "chalk": "4.1.2", - "commander": "8.3.0", - "compare-versions": "4.1.4", - "concurrently": "9.2.0", - "console.table": "0.10.0", - "fs-extra": "11.3.1", - "glob": "11.0.3", - "inquirer": "8.2.7", - "proxy-agent": "6.5.0", - "reflect-metadata": "0.2.2", - "rxjs": "7.8.2", - "tslib": "2.8.1" - }, - "bin": { - "openapi-generator-cli": "main.js" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/openapi_generator" - } - }, - "node_modules/@playwright/test": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.54.2.tgz", - "integrity": "sha512-A+znathYxPf+72riFd1r1ovOLqsIIB0jKIoPjyK2kqEIe30/6jF6BC7QNluHuwUmsD2tv1XZVugN8GqfTMOxsA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "playwright": "1.54.2" - }, - "bin": { - "playwright": "cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polka/url": { - "version": "1.0.0-next.29", - "dev": true, - "license": "MIT" - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.46.2", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@sveltejs/acorn-typescript": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.5.tgz", - "integrity": "sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "acorn": "^8.9.0" - } - }, - "node_modules/@sveltejs/adapter-static": { - "version": "3.0.9", - "resolved": 
"https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.9.tgz", - "integrity": "sha512-aytHXcMi7lb9ljsWUzXYQ0p5X1z9oWud2olu/EpmH7aCu4m84h7QLvb5Wp+CFirKcwoNnYvYWhyP/L8Vh1ztdw==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@sveltejs/kit": "^2.0.0" - } - }, - "node_modules/@sveltejs/kit": { - "version": "2.27.3", - "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.27.3.tgz", - "integrity": "sha512-jiG3NGZ8RRpi+ncjVnX+oR7uWEgzy//3YLGcTU5mHtjGraeGyNDr7GJFHlk7z0vi8bMXpXIUkEXj6p70FJmHvw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@standard-schema/spec": "^1.0.0", - "@sveltejs/acorn-typescript": "^1.0.5", - "@types/cookie": "^0.6.0", - "acorn": "^8.14.1", - "cookie": "^0.6.0", - "devalue": "^5.1.0", - "esm-env": "^1.2.2", - "kleur": "^4.1.5", - "magic-string": "^0.30.5", - "mrmime": "^2.0.0", - "sade": "^1.8.1", - "set-cookie-parser": "^2.6.0", - "sirv": "^3.0.0" - }, - "bin": { - "svelte-kit": "svelte-kit.js" - }, - "engines": { - "node": ">=18.13" - }, - "peerDependencies": { - "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", - "svelte": "^4.0.0 || ^5.0.0-next.0", - "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" - } - }, - "node_modules/@sveltejs/kit/node_modules/@standard-schema/spec": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@sveltejs/kit/node_modules/devalue": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.1.1.tgz", - "integrity": "sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@sveltejs/vite-plugin-svelte": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.1.0.tgz", - "integrity": "sha512-+U6lz1wvGEG/BvQyL4z/flyNdQ9xDNv5vrh+vWBWTHaebqT0c9RNggpZTo/XSPoHsSCWBlYaTlRX8pZ9GATXCw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0-next.1", - "debug": "^4.4.1", - "deepmerge": "^4.3.1", - "kleur": "^4.1.5", - "magic-string": "^0.30.17", - "vitefu": "^1.1.1" - }, - "engines": { - "node": "^20.19 || ^22.12 || >=24" - }, - "peerDependencies": { - "svelte": "^5.0.0", - "vite": "^6.3.0 || ^7.0.0" - } - }, - "node_modules/@sveltejs/vite-plugin-svelte-inspector": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.0.tgz", - "integrity": "sha512-iwQ8Z4ET6ZFSt/gC+tVfcsSBHwsqc6RumSaiLUkAurW3BCpJam65cmHw0oOlDMTO0u+PZi9hilBRYN+LZNHTUQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.4.1" - }, - "engines": { - "node": "^20.19 || ^22.12 || >=24" - }, - "peerDependencies": { - "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", - "svelte": "^5.0.0", - "vite": "^6.3.0 || ^7.0.0" - } - }, - "node_modules/@tailwindcss/forms": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.10.tgz", - "integrity": "sha512-utI1ONF6uf/pPNO68kmN1b8rEwNXv3czukalo8VtJH8ksIkZXr3Q3VYudZLkCsDd4Wku120uF02hYK25XGPorw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mini-svg-data-uri": "^1.2.3" - }, - "peerDependencies": { - "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1 || >= 
4.0.0-alpha.20 || >= 4.0.0-beta.1" - } - }, - "node_modules/@tailwindcss/node": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.11.tgz", - "integrity": "sha512-yzhzuGRmv5QyU9qLNg4GTlYI6STedBWRE7NjxP45CsFYYq9taI0zJXZBMqIC/c8fViNLhmrbpSFS57EoxUmD6Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "enhanced-resolve": "^5.18.1", - "jiti": "^2.4.2", - "lightningcss": "1.30.1", - "magic-string": "^0.30.17", - "source-map-js": "^1.2.1", - "tailwindcss": "4.1.11" - } - }, - "node_modules/@tailwindcss/oxide": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.11.tgz", - "integrity": "sha512-Q69XzrtAhuyfHo+5/HMgr1lAiPP/G40OMFAnws7xcFEYqcypZmdW8eGXaOUIeOl1dzPJBPENXgbjsOyhg2nkrg==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "detect-libc": "^2.0.4", - "tar": "^7.4.3" - }, - "engines": { - "node": ">= 10" - }, - "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.11", - "@tailwindcss/oxide-darwin-arm64": "4.1.11", - "@tailwindcss/oxide-darwin-x64": "4.1.11", - "@tailwindcss/oxide-freebsd-x64": "4.1.11", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.11", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.11", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.11", - "@tailwindcss/oxide-linux-x64-gnu": "4.1.11", - "@tailwindcss/oxide-linux-x64-musl": "4.1.11", - "@tailwindcss/oxide-wasm32-wasi": "4.1.11", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.11", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.11" - } - }, - "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.11.tgz", - "integrity": "sha512-3IfFuATVRUMZZprEIx9OGDjG3Ou3jG4xQzNTvjDoKmU9JdmoCohQJ83MYd0GPnQIu89YoJqvMM0G3uqLRFtetg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.11.tgz", - "integrity": "sha512-ESgStEOEsyg8J5YcMb1xl8WFOXfeBmrhAwGsFxxB2CxY9evy63+AtpbDLAyRkJnxLy2WsD1qF13E97uQyP1lfQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.11.tgz", - "integrity": "sha512-EgnK8kRchgmgzG6jE10UQNaH9Mwi2n+yw1jWmof9Vyg2lpKNX2ioe7CJdf9M5f8V9uaQxInenZkOxnTVL3fhAw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.11.tgz", - "integrity": "sha512-xdqKtbpHs7pQhIKmqVpxStnY1skuNh4CtbcyOHeX1YBE0hArj2romsFGb6yUmzkq/6M24nkxDqU8GYrKrz+UcA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.11", - "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.11.tgz", - "integrity": "sha512-ryHQK2eyDYYMwB5wZL46uoxz2zzDZsFBwfjssgB7pzytAeCCa6glsiJGjhTEddq/4OsIjsLNMAiMlHNYnkEEeg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.11.tgz", - "integrity": "sha512-mYwqheq4BXF83j/w75ewkPJmPZIqqP1nhoghS9D57CLjsh3Nfq0m4ftTotRYtGnZd3eCztgbSPJ9QhfC91gDZQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.11.tgz", - "integrity": "sha512-m/NVRFNGlEHJrNVk3O6I9ggVuNjXHIPoD6bqay/pubtYC9QIdAMpS+cswZQPBLvVvEF6GtSNONbDkZrjWZXYNQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.11.tgz", - "integrity": "sha512-YW6sblI7xukSD2TdbbaeQVDysIm/UPJtObHJHKxDEcW2exAtY47j52f8jZXkqE1krdnkhCMGqP3dbniu1Te2Fg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.11.tgz", - "integrity": "sha512-e3C/RRhGunWYNC3aSF7exsQkdXzQ/M+aYuZHKnw4U7KQwTJotnWsGOIVih0s2qQzmEzOFIJ3+xt7iq67K/p56Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.11.tgz", - "integrity": "sha512-Xo1+/GU0JEN/C/dvcammKHzeM6NqKovG+6921MR6oadee5XPBaKOumrJCXvopJ/Qb5TH7LX/UAywbqrP4lax0g==", - "bundleDependencies": [ - "@napi-rs/wasm-runtime", - "@emnapi/core", - "@emnapi/runtime", - "@tybys/wasm-util", - "@emnapi/wasi-threads", - "tslib" - ], - "cpu": [ - "wasm32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/core": "^1.4.3", - "@emnapi/runtime": "^1.4.3", - "@emnapi/wasi-threads": "^1.0.2", - "@napi-rs/wasm-runtime": "^0.2.11", - "@tybys/wasm-util": "^0.9.0", - "tslib": "^2.8.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { - "version": "1.4.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/wasi-threads": "1.0.2", - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { - "version": "1.4.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { - "version": "1.0.2", - 
"dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { - "version": "0.2.11", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/core": "^1.4.3", - "@emnapi/runtime": "^1.4.3", - "@tybys/wasm-util": "^0.9.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { - "version": "0.9.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { - "version": "2.8.0", - "dev": true, - "inBundle": true, - "license": "0BSD", - "optional": true - }, - "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.11.tgz", - "integrity": "sha512-UgKYx5PwEKrac3GPNPf6HVMNhUIGuUh4wlDFR2jYYdkX6pL/rn73zTq/4pzUm8fOjAn5L8zDeHp9iXmUGOXZ+w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.11.tgz", - "integrity": "sha512-YfHoggn1j0LK7wR82TOucWc5LDCguHnoS879idHekmmiR7g9HUtMw9MI0NHatS28u/Xlkfi9w5RJWgz2Dl+5Qg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/postcss": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.11.tgz", - "integrity": "sha512-q/EAIIpF6WpLhKEuQSEVMZNMIY8KhWoAemZ9eylNAih9jxMGAYPPWBn3I9QL/2jZ+e7OEz/tZkX5HwbBR4HohA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@alloc/quick-lru": "^5.2.0", - "@tailwindcss/node": "4.1.11", - "@tailwindcss/oxide": "4.1.11", - "postcss": "^8.4.41", - "tailwindcss": "4.1.11" - } - }, - "node_modules/@tailwindcss/typography": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.16.tgz", - "integrity": "sha512-0wDLwCVF5V3x3b1SGXPCDcdsbDHMBe+lkFzBRaHeLvNi+nrrnZ1lA18u+OTWO8iSWU2GxUOCvlXtDuqftc1oiA==", - "license": "MIT", - "dependencies": { - "lodash.castarray": "^4.4.0", - "lodash.isplainobject": "^4.0.6", - "lodash.merge": "^4.6.2", - "postcss-selector-parser": "6.0.10" - }, - "peerDependencies": { - "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" - } - }, - "node_modules/@testing-library/dom": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz", - "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/runtime": "^7.12.5", - "@types/aria-query": "^5.0.1", - "aria-query": "5.3.0", - "dom-accessibility-api": "^0.5.9", - "lz-string": "^1.5.0", - "picocolors": "1.1.1", - "pretty-format": "^27.0.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@testing-library/jest-dom": { - "version": "6.7.0", - "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.7.0.tgz", - "integrity": 
"sha512-RI2e97YZ7MRa+vxP4UUnMuMFL2buSsf0ollxUbTgrbPLKhMn8KVTx7raS6DYjC7v1NDVrioOvaShxsguLNISCA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@adobe/css-tools": "^4.4.0", - "aria-query": "^5.0.0", - "css.escape": "^1.5.1", - "dom-accessibility-api": "^0.6.3", - "picocolors": "^1.1.1", - "redent": "^3.0.0" - }, - "engines": { - "node": ">=14", - "npm": ">=6", - "yarn": ">=1" - } - }, - "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", - "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@testing-library/svelte": { - "version": "5.2.0-next.3", - "resolved": "https://registry.npmjs.org/@testing-library/svelte/-/svelte-5.2.0-next.3.tgz", - "integrity": "sha512-aLp9Q84eaI1i25SBQ++PWLijZ7jNoUwjnSnL2cyLyJYBQQSPcEiCgSjDYIygbknOqVkmUE/dsgQHVjGeIatZvg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@testing-library/dom": "^10.0.0" - }, - "engines": { - "node": ">= 10" - }, - "peerDependencies": { - "svelte": "^3 || ^4 || ^5 || ^5.0.0-next.0", - "vite": "*", - "vitest": "*" - }, - "peerDependenciesMeta": { - "vite": { - "optional": true - }, - "vitest": { - "optional": true - } - } - }, - "node_modules/@testing-library/user-event": { - "version": "14.6.1", - "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.6.1.tgz", - "integrity": "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12", - "npm": ">=6" - }, - "peerDependencies": { - "@testing-library/dom": ">=7.21.4" - } - }, - "node_modules/@tokenizer/token": { - "version": "0.3.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/aria-query": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", - "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/chai": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", - "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/deep-eql": "*" - } - }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/deep-eql": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", - "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "24.2.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.2.0.tgz", - "integrity": "sha512-3xyG3pMCq3oYCNg7/ZP+E1ooTaGB4cG8JWRsqqOYQdbWNY4zbaV0Ennrd7stjiJEFZCaybcIgpTjJWHRfBSIDw==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~7.10.0" - 
} - }, - "node_modules/@types/swagger-schema-official": { - "version": "2.0.25", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/whatwg-mimetype": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/whatwg-mimetype/-/whatwg-mimetype-3.0.2.tgz", - "integrity": "sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@vitest/expect": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", - "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/chai": "^5.2.2", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/mocker": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", - "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "3.2.4", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } - } - }, - "node_modules/@vitest/pretty-format": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", - "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", - "dev": true, - "license": "MIT", - "dependencies": { - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/runner": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", - "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/utils": "3.2.4", - "pathe": "^2.0.3", - "strip-literal": "^3.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/snapshot": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", - "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/pretty-format": "3.2.4", - "magic-string": "^0.30.17", - "pathe": "^2.0.3" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/spy": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", - "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", - "dev": true, - "license": "MIT", - "dependencies": { - "tinyspy": "^4.0.3" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/ui": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-3.2.4.tgz", - "integrity": 
"sha512-hGISOaP18plkzbWEcP/QvtRW1xDXF2+96HbEX6byqQhAUbiS5oH6/9JwW+QsQCIYON2bI6QZBF+2PvOmrRZ9wA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/utils": "3.2.4", - "fflate": "^0.8.2", - "flatted": "^3.3.3", - "pathe": "^2.0.3", - "sirv": "^3.0.1", - "tinyglobby": "^0.2.14", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "vitest": "3.2.4" - } - }, - "node_modules/@vitest/utils": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", - "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/pretty-format": "3.2.4", - "loupe": "^3.1.4", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/acorn": { - "version": "8.15.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", - "dev": true, - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/agent-base": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", - "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/aria-query": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", - "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "dequal": "^2.0.3" - } - }, - "node_modules/assertion-error": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", - "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/asynckit": { - "version": "0.4.0", - "dev": true, - "license": "MIT" - }, - "node_modules/autoprefixer": { - "version": "10.4.21", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", - "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - 
"type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.24.4", - "caniuse-lite": "^1.0.30001702", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/axios": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", - "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", - "dev": true, - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.4", - "proxy-from-env": "^1.1.0" - } - }, - "node_modules/axios/node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/follow-redirects": { - "version": "1.15.11", - "resolved": 
"https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", - "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/axios/node_modules/form-data": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", - "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", - "dev": true, - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/axios/node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios/node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios/node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios/node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios/node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": 
true, - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios/node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/axios/node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true, - "license": "MIT" - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/browserslist": { - "version": "4.25.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", - "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "caniuse-lite": "^1.0.30001726", - "electron-to-chromium": "^1.5.173", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.3" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/browserslist/node_modules/electron-to-chromium": { - "version": "1.5.199", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.199.tgz", - "integrity": "sha512-3gl0S7zQd88kCAZRO/DnxtBKuhMO4h0EaQIN3YgZfV6+pW+5+bf2AdQeHNESCoaQqo/gjGVYEf2YM4O5HJQqpQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/browserslist/node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/browserslist/node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": 
"sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "dev": true, - "license": "MIT" - }, - "node_modules/browserslist/node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "node_modules/c12": { - "version": "3.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^4.0.3", - "confbox": "^0.2.2", - "defu": "^6.1.4", - "dotenv": "^17.2.1", - "exsolve": "^1.0.7", - "giget": "^2.0.0", - "jiti": "^2.5.1", - "ohash": "^2.0.11", - "pathe": "^2.0.3", - "perfect-debounce": "^1.0.0", - "pkg-types": "^2.2.0", - "rc9": "^2.1.2" - }, - "peerDependencies": { - "magicast": "^0.3.5" - }, - "peerDependenciesMeta": { - "magicast": { - "optional": true - } - } - }, - "node_modules/c12/node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" - }, - "engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/c12/node_modules/defu": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", - "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", - "dev": true, - "license": "MIT" - }, - "node_modules/c12/node_modules/dotenv": { - "version": "17.2.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.1.tgz", - "integrity": "sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, - "node_modules/c12/node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": 
"https://paulmillr.com/funding/" - } - }, - "node_modules/cac": { - "version": "6.7.14", - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/call-me-maybe": { - "version": "1.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001731", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001731.tgz", - "integrity": "sha512-lDdp2/wrOmTRWuoB5DpfNkC0rJDU8DqRa6nYL6HK6sytw70QMopt/NIc/9SM7ylItlBWfACXk0tEn37UWM/+mg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chai": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.1.tgz", - "integrity": "sha512-48af6xm9gQK8rhIcOxWwdGzIervm8BVTin+yRp9HEvU20BtVZ2lBywlIJBzwaDtvo0FvjeL7QdCADoUoqIbV3A==", - "dev": true, - "license": "MIT", - "dependencies": { - "assertion-error": "^2.0.1", - "check-error": "^2.1.1", - "deep-eql": "^5.0.1", - "loupe": "^3.1.0", - "pathval": "^2.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chardet": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz", - "integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==", - "dev": true, - "license": "MIT" - }, - "node_modules/check-error": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", - "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 16" - } - }, - "node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/citty": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", - "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "consola": "^3.2.3" - } - }, - "node_modules/citty/node_modules/consola": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", - "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/cli-cursor": { - 
"version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "license": "MIT", - "dependencies": { - "restore-cursor": "^3.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">= 10" - } - }, - "node_modules/clone": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/clsx": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/codemirror": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.2.tgz", - "integrity": "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==", - "license": "MIT", - "dependencies": { - "@codemirror/autocomplete": "^6.0.0", - "@codemirror/commands": "^6.0.0", - "@codemirror/language": "^6.0.0", - "@codemirror/lint": "^6.0.0", - "@codemirror/search": "^6.0.0", - "@codemirror/state": "^6.0.0", - "@codemirror/view": "^6.0.0" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "dev": true, - "license": "MIT", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/compare-versions": { - "version": "4.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/concurrently": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-9.2.0.tgz", - "integrity": "sha512-IsB/fiXTupmagMW4MNp2lx2cdSN2FfZq78vF90LBB+zZHArbIQZjQtzXCiXnvTxCZSvXanTqFLWBjw2UkLx1SQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.2", - "lodash": "^4.17.21", - "rxjs": "^7.8.1", - "shell-quote": "^1.8.1", - "supports-color": "^8.1.1", - "tree-kill": "^1.2.2", - "yargs": "^17.7.2" - }, - "bin": { - "conc": "dist/bin/concurrently.js", - "concurrently": "dist/bin/concurrently.js" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": 
"https://github.com/open-cli-tools/concurrently?sponsor=1" - } - }, - "node_modules/concurrently/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/concurrently/node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/concurrently/node_modules/shell-quote": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", - "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/concurrently/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/concurrently/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/concurrently/node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/concurrently/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/concurrently/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/confbox": { - "version": "0.2.2", - 
"dev": true, - "license": "MIT" - }, - "node_modules/consola": { - "version": "2.15.3", - "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", - "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==", - "dev": true, - "license": "MIT" - }, - "node_modules/console.table": { - "version": "0.10.0", - "dev": true, - "license": "MIT", - "dependencies": { - "easy-table": "1.1.0" - }, - "engines": { - "node": "> 0.10" - } - }, - "node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/css.escape": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", - "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", - "dev": true, - "license": "MIT" - }, - "node_modules/cssesc": { - "version": "3.0.0", - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/cssstyle": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.6.0.tgz", - "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@asamuzakjp/css-color": "^3.2.0", - "rrweb-cssom": "^0.8.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/data-urls": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", - "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^14.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/data-urls/node_modules/tr46": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", - "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", - "dev": true, - "license": "MIT", - "dependencies": { - "punycode": "^2.3.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/data-urls/node_modules/webidl-conversions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", - "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - } - }, - "node_modules/data-urls/node_modules/whatwg-url": { - "version": "14.2.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", - "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", - "dev": true, - "license": "MIT", - "dependencies": { - "tr46": "^5.1.0", - "webidl-conversions": "^7.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - 
"peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decimal.js": { - "version": "10.6.0", - "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz", - "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==", - "dev": true, - "license": "MIT" - }, - "node_modules/deep-eql": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", - "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/defaults": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "clone": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/detect-libc": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=8" - } - }, - "node_modules/dom-accessibility-api": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", - "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", - "dev": true, - "license": "MIT" - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "dev": true, - "license": "MIT" - }, - "node_modules/easy-table": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "optionalDependencies": { - "wcwidth": ">=1.0.1" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": 
"https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", - "dev": true, - "license": "MIT" - }, - "node_modules/es6-promise": { - "version": "3.3.1", - "dev": true, - "license": "MIT" - }, - "node_modules/esbuild": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.8.tgz", - "integrity": "sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.8", - "@esbuild/android-arm": "0.25.8", - "@esbuild/android-arm64": "0.25.8", - "@esbuild/android-x64": "0.25.8", - "@esbuild/darwin-arm64": "0.25.8", - "@esbuild/darwin-x64": "0.25.8", - "@esbuild/freebsd-arm64": "0.25.8", - "@esbuild/freebsd-x64": "0.25.8", - "@esbuild/linux-arm": "0.25.8", - "@esbuild/linux-arm64": "0.25.8", - "@esbuild/linux-ia32": "0.25.8", - "@esbuild/linux-loong64": "0.25.8", - "@esbuild/linux-mips64el": "0.25.8", - "@esbuild/linux-ppc64": "0.25.8", - "@esbuild/linux-riscv64": "0.25.8", - "@esbuild/linux-s390x": "0.25.8", - "@esbuild/linux-x64": "0.25.8", - "@esbuild/netbsd-arm64": "0.25.8", - "@esbuild/netbsd-x64": "0.25.8", - "@esbuild/openbsd-arm64": "0.25.8", - "@esbuild/openbsd-x64": "0.25.8", - "@esbuild/openharmony-arm64": "0.25.8", - "@esbuild/sunos-x64": "0.25.8", - "@esbuild/win32-arm64": "0.25.8", - "@esbuild/win32-ia32": "0.25.8", - "@esbuild/win32-x64": "0.25.8" - } - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/esm-env": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", - "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/esprima": { - "version": "4.0.1", - "dev": true, - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esrap": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.1.0.tgz", - "integrity": "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" - } - }, - "node_modules/estree-walker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eta": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", - "integrity": 
"sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - }, - "funding": { - "url": "https://github.com/eta-dev/eta?sponsor=1" - } - }, - "node_modules/expect-type": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", - "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/exsolve": { - "version": "1.0.7", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true, - "license": "MIT" - }, - "node_modules/fdir": { - "version": "6.4.6", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", - "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - }, - "node_modules/fflate": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", - "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", - "dev": true, - "license": "MIT" - }, - "node_modules/figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "dev": true, - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", - "dev": true, - "license": "ISC" - }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fs-extra": { - "version": "11.3.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.1.tgz", - "integrity": "sha512-eXvGGwZ5CL17ZSwHWd3bbgk7UUpF6IFHtP57NYYakPvHOs8GDgDe5KJI36jIJzDkJ6eJjuzRA8eBQb6SkKue0g==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - 
"engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/giget": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "citty": "^0.1.6", - "consola": "^3.4.0", - "defu": "^6.1.4", - "node-fetch-native": "^1.6.6", - "nypm": "^0.6.0", - "pathe": "^2.0.3" - }, - "bin": { - "giget": "dist/cli.mjs" - } - }, - "node_modules/giget/node_modules/consola": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", - "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/giget/node_modules/defu": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", - "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", - "dev": true, - "license": "MIT" - }, - "node_modules/giget/node_modules/node-fetch-native": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", - "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/glob": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz", - "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.3.1", - "jackspeak": "^4.1.1", - "minimatch": "^10.0.3", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^2.0.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/@isaacs/balanced-match": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", - "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/glob/node_modules/@isaacs/brace-expansion": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", - "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@isaacs/balanced-match": "^4.0.1" - }, - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/glob/node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/glob/node_modules/ansi-regex": { - "version": "6.1.0", - 
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/glob/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/glob/node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/glob/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true, - "license": "MIT" - }, - "node_modules/glob/node_modules/foreground-child": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/jackspeak": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", - "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/lru-cache": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.1.0.tgz", - "integrity": "sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==", - "dev": true, - "license": "ISC", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz", - "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==", - "dev": true, - "license": "ISC", - "dependencies": { - "@isaacs/brace-expansion": "^5.0.0" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": 
"sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/glob/node_modules/path-scurry": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", - "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/glob/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/happy-dom": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-18.0.1.tgz", - "integrity": "sha512-qn+rKOW7KWpVTtgIUi6RVmTBZJSe2k0Db0vh1f7CWrWclkkc7/Q+FrOfkZIb2eiErLyqu5AXEzE7XthO9JVxRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "^20.0.0", - "@types/whatwg-mimetype": "^3.0.2", - "whatwg-mimetype": "^3.0.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/happy-dom/node_modules/@types/node": { - "version": "20.19.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.11.tgz", - "integrity": 
"sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.21.0" - } - }, - "node_modules/happy-dom/node_modules/undici-types": { - "version": "6.21.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", - "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/happy-dom/node_modules/whatwg-mimetype": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", - "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hasown/node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/html-encoding-sniffer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", - "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "whatwg-encoding": "^3.1.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/http-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", - "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/https-proxy-agent": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", - "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/indent-string": { - "version": 
"4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/inquirer": { - "version": "8.2.7", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.7.tgz", - "integrity": "sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/external-editor": "^1.0.0", - "ansi-escapes": "^4.2.1", - "chalk": "^4.1.1", - "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", - "figures": "^3.0.0", - "lodash": "^4.17.21", - "mute-stream": "0.0.8", - "ora": "^5.4.1", - "run-async": "^2.4.0", - "rxjs": "^7.5.5", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", - "through": "^2.3.6", - "wrap-ansi": "^6.0.1" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-interactive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-potential-custom-element-name": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", - "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-reference": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", - "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.6" - } - }, - "node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/iterare": { - "version": "1.2.1", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=6" - } - }, - "node_modules/jiti": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.5.1.tgz", - "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==", - "dev": true, - "license": "MIT", - "bin": { - "jiti": "lib/jiti-cli.mjs" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": 
"sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/js-yaml/node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, - "license": "Python-2.0" - }, - "node_modules/jsbn": { - "version": "1.1.0", - "dev": true, - "license": "MIT" - }, - "node_modules/jsdom": { - "version": "26.1.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-26.1.0.tgz", - "integrity": "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cssstyle": "^4.2.1", - "data-urls": "^5.0.0", - "decimal.js": "^10.5.0", - "html-encoding-sniffer": "^4.0.0", - "http-proxy-agent": "^7.0.2", - "https-proxy-agent": "^7.0.6", - "is-potential-custom-element-name": "^1.0.1", - "nwsapi": "^2.2.16", - "parse5": "^7.2.1", - "rrweb-cssom": "^0.8.0", - "saxes": "^6.0.0", - "symbol-tree": "^3.2.4", - "tough-cookie": "^5.1.1", - "w3c-xmlserializer": "^5.0.0", - "webidl-conversions": "^7.0.0", - "whatwg-encoding": "^3.1.1", - "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^14.1.1", - "ws": "^8.18.0", - "xml-name-validator": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "canvas": "^3.0.0" - }, - "peerDependenciesMeta": { - "canvas": { - "optional": true - } - } - }, - "node_modules/jsdom/node_modules/tr46": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", - "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", - "dev": true, - "license": "MIT", - "dependencies": { - "punycode": "^2.3.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/jsdom/node_modules/webidl-conversions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", - "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - } - }, - "node_modules/jsdom/node_modules/whatwg-url": { - "version": "14.2.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", - "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", - "dev": true, - "license": "MIT", - "dependencies": { - "tr46": "^5.1.0", - "webidl-conversions": "^7.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/kleur": { - "version": "4.1.5", - "dev": true, - 
"license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/lightningcss": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", - "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", - "dev": true, - "license": "MPL-2.0", - "dependencies": { - "detect-libc": "^2.0.3" - }, - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - }, - "optionalDependencies": { - "lightningcss-darwin-arm64": "1.30.1", - "lightningcss-darwin-x64": "1.30.1", - "lightningcss-freebsd-x64": "1.30.1", - "lightningcss-linux-arm-gnueabihf": "1.30.1", - "lightningcss-linux-arm64-gnu": "1.30.1", - "lightningcss-linux-arm64-musl": "1.30.1", - "lightningcss-linux-x64-gnu": "1.30.1", - "lightningcss-linux-x64-musl": "1.30.1", - "lightningcss-win32-arm64-msvc": "1.30.1", - "lightningcss-win32-x64-msvc": "1.30.1" - } - }, - "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", - "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-darwin-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", - "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", - "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", - "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", - "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MPL-2.0", - 
"optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", - "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", - "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", - "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", - "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", - "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MPL-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/load-esm": { - "version": "1.0.2", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - }, - { - "type": "buymeacoffee", - "url": "https://buymeacoffee.com/borewit" - } - ], - "license": "MIT", - "engines": { - "node": ">=13.2.0" - } - }, - "node_modules/locate-character": { - "version": "3.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.castarray": { - "version": "4.4.0", - "license": "MIT" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "license": "MIT" - }, - "node_modules/log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/loupe": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.0.tgz", - "integrity": "sha512-2NCfZcT5VGVNX9mSZIxLRkEAegDGBpuQZBy13desuHeVORmBDyAET4TkJr4SjqQy3A8JDofMN6LpkK8Xcm/dlw==", - "dev": true, - "license": "MIT" - }, - "node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/lz-string": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", - "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", - "dev": true, - "license": "MIT", - "bin": { - "lz-string": "bin/bin.js" - } - }, - "node_modules/magic-string": { - "version": "0.30.17", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", - "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/mini-svg-data-uri": { - "version": "1.4.4", - "dev": true, - "license": "MIT", - "bin": { - "mini-svg-data-uri": "cli.js" - } - }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": 
"sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minizlib": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", - "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^7.1.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/mri": { - "version": "1.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/mrmime": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "dev": true, - "license": "MIT" - }, - "node_modules/mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true, - "license": "ISC" - }, - "node_modules/node-fetch-h2": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "http2-client": "^1.2.5" - }, - "engines": { - "node": "4.x || >=6.0.0" - } - }, - "node_modules/node-fetch-h2/node_modules/http2-client": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", - "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==", - "dev": true, - "license": "MIT" - }, - "node_modules/node-readfiles": { - "version": "0.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "es6-promise": "^3.2.1" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nwsapi": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.21.tgz", - "integrity": "sha512-o6nIY3qwiSXl7/LuOU0Dmuctd34Yay0yeuZRLFmDPrrdHpXKFndPj3hM+YEPVHYC5fx2otBx4Ilc/gyYSAUaIA==", - "dev": true, - "license": "MIT" - }, - "node_modules/nypm": { - "version": "0.6.1", - "dev": true, - "license": "MIT", - "dependencies": { - "citty": "^0.1.6", - "consola": "^3.4.2", - "pathe": "^2.0.3", - "pkg-types": "^2.2.0", - "tinyexec": "^1.0.1" - }, - "bin": { - "nypm": "dist/cli.mjs" - }, - "engines": { - "node": "^14.16.0 || >=16.10.0" - } - }, - "node_modules/nypm/node_modules/consola": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", - "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/nypm/node_modules/tinyexec": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", - "integrity": 
"sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", - "dev": true, - "license": "MIT" - }, - "node_modules/oas-kit-common": { - "version": "1.0.8", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "fast-safe-stringify": "^2.0.7" - } - }, - "node_modules/oas-linter": { - "version": "3.2.2", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@exodus/schemasafe": "^1.0.0-rc.2", - "should": "^13.2.1", - "yaml": "^1.10.0" - }, - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/oas-linter/node_modules/@exodus/schemasafe": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.3.0.tgz", - "integrity": "sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==", - "dev": true, - "license": "MIT" - }, - "node_modules/oas-linter/node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">= 6" - } - }, - "node_modules/oas-schema-walker": { - "version": "1.1.5", - "dev": true, - "license": "BSD-3-Clause", - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/ohash": { - "version": "2.0.11", - "dev": true, - "license": "MIT" - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", - "dev": true, - "license": "MIT", - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-to-regexp": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", - "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=16" - } - }, - "node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - 
"integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "dev": true, - "license": "MIT" - }, - "node_modules/pathval": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", - "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.16" - } - }, - "node_modules/perfect-debounce": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pkg-types": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "confbox": "^0.2.2", - "exsolve": "^1.0.7", - "pathe": "^2.0.3" - } - }, - "node_modules/playwright": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.54.2.tgz", - "integrity": "sha512-Hu/BMoA1NAdRUuulyvQC0pEqZ4vQbGfn8f7wPXcnqQmM+zct9UliKxsIkLNmz/ku7LElUNqmaiv1TG/aL5ACsw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "playwright-core": "1.54.2" - }, - "bin": { - "playwright": "cli.js" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "fsevents": "2.3.2" - } - }, - "node_modules/playwright-core": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.54.2.tgz", - "integrity": "sha512-n5r4HFbMmWsB4twG7tJLDN9gmBUeSPcsBZiWSE4DnYz9mJMAFqr2ID7+eGC9kpEnxExJ1epttwR59LEWCk8mtA==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "playwright-core": "cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.0.10", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", - "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "dev": true, - "license": "MIT" - }, - 
"node_modules/postcss/node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/pretty-format": { - "version": "27.5.1", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", - "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1", - "ansi-styles": "^5.0.0", - "react-is": "^17.0.1" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" - } - }, - "node_modules/pretty-format/node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/proxy-agent": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.5.0.tgz", - "integrity": "sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "http-proxy-agent": "^7.0.1", - "https-proxy-agent": "^7.0.6", - "lru-cache": "^7.14.1", - "pac-proxy-agent": "^7.1.0", - "proxy-from-env": "^1.1.0", - "socks-proxy-agent": "^8.0.5" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/@tootallnate/quickjs-emscripten": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", - "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", - "dev": true, - "license": "MIT" - }, - "node_modules/proxy-agent/node_modules/ast-types": { - "version": "0.13.4", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", - "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", - "dev": true, - "license": "MIT", - "dependencies": { - "tslib": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/proxy-agent/node_modules/basic-ftp": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", - "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/proxy-agent/node_modules/data-uri-to-buffer": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", - "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/degenerator": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", - "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ast-types": "^0.13.4", - "escodegen": "^2.1.0", - "esprima": "^4.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, - "node_modules/proxy-agent/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/proxy-agent/node_modules/get-uri": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.5.tgz", - "integrity": "sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==", - "dev": true, - "license": "MIT", - "dependencies": { - "basic-ftp": "^5.0.2", - "data-uri-to-buffer": "^6.0.2", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/ip-address": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", - "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "jsbn": "1.1.0", - "sprintf-js": "^1.1.3" - }, - "engines": { - "node": ">= 12" - } - }, - "node_modules/proxy-agent/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/proxy-agent/node_modules/netmask": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", - "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/proxy-agent/node_modules/pac-proxy-agent": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz", - "integrity": "sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tootallnate/quickjs-emscripten": "^0.23.0", - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "get-uri": "^6.0.1", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.6", - "pac-resolver": "^7.0.1", - "socks-proxy-agent": "^8.0.5" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/pac-resolver": { - "version": 
"7.0.1", - "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", - "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", - "dev": true, - "license": "MIT", - "dependencies": { - "degenerator": "^5.0.0", - "netmask": "^2.0.2" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true, - "license": "MIT" - }, - "node_modules/proxy-agent/node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/proxy-agent/node_modules/socks": { - "version": "2.8.6", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.6.tgz", - "integrity": "sha512-pe4Y2yzru68lXCb38aAqRf5gvN8YdjP1lok5o0J7BOHljkyCGKVz7H3vpVIXKD27rj2giOJ7DwVyk/GWrPHDWA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ip-address": "^9.0.5", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/proxy-agent/node_modules/socks-proxy-agent": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", - "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "socks": "^2.8.3" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/proxy-agent/node_modules/sprintf-js": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/rc9": { - "version": "2.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "defu": "^6.1.4", - "destr": "^2.0.3" - } - }, - "node_modules/rc9/node_modules/defu": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", - "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", - "dev": true, - "license": "MIT" - }, - "node_modules/rc9/node_modules/destr": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.5.tgz", - "integrity": "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==", - "dev": true, - "license": "MIT" - }, - "node_modules/react-is": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", - "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", - "dev": 
true, - "license": "MIT" - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/redent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", - "dev": true, - "license": "MIT", - "dependencies": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/reflect-metadata": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz", - "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/require-directory": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/rrweb-cssom": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz", - "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==", - "dev": true, - "license": "MIT" - }, - "node_modules/run-async": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", - "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/rxjs": { - "version": "7.8.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", - "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/sade": { - "version": "1.8.1", - "dev": true, - "license": "MIT", - "dependencies": { - "mri": "^1.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true, - "license": "MIT" - }, - "node_modules/saxes": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", - "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", - "dev": true, - "license": "ISC", - "dependencies": { - "xmlchars": "^2.2.0" - }, - "engines": { - "node": ">=v12.22.7" - } - }, - "node_modules/set-cookie-parser": { - "version": "2.7.1", - "dev": true, - "license": "MIT" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/should": { - "version": "13.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "should-equal": "^2.0.0", - "should-format": "^3.0.3", - "should-type": "^1.4.0", - "should-type-adaptors": "^1.0.1", - "should-util": "^1.0.0" - } - }, - "node_modules/should-equal": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "should-type": "^1.4.0" - } - }, - "node_modules/should-format": { - "version": "3.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "should-type": "^1.3.0", - "should-type-adaptors": "^1.0.1" - } - }, - "node_modules/should-type": { - "version": "1.4.0", - "dev": true, - "license": "MIT" - }, - "node_modules/should-type-adaptors": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "should-type": "^1.3.0", - "should-util": "^1.0.0" - } - }, - "node_modules/should-util": { - "version": "1.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/siginfo": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", - "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", - "dev": true, - "license": "ISC" - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/sirv": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@polka/url": "^1.0.0-next.24", - "mrmime": "^2.0.0", - "totalist": "^3.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/sirv/node_modules/totalist": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", - "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "dev": true, - "license": "BSD-3-Clause", - "optional": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/stackback": { - "version": "0.0.2", - 
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", - "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", - "dev": true, - "license": "MIT" - }, - "node_modules/std-env": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", - "dev": true, - "license": "MIT" - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-indent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", - "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "min-indent": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-literal": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", - "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", - "dev": true, - "license": "MIT", - "dependencies": { - "js-tokens": "^9.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - } - }, - "node_modules/strip-literal/node_modules/js-tokens": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", - "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/supports-color": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/svelte": { - "version": "5.38.0", - "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.38.0.tgz", - "integrity": "sha512-cWF1Oc2IM/QbktdK89u5lt9MdKxRtQnRKnf2tq6KOhYuhLOd2hbMuTiJ+vWMzAeMDe81AzbCgLd4GVtOJ4fDRg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "@jridgewell/sourcemap-codec": "^1.5.0", - "@sveltejs/acorn-typescript": "^1.0.5", - "@types/estree": "^1.0.5", - "acorn": "^8.12.1", - "aria-query": "^5.3.1", - "axobject-query": "^4.1.0", - "clsx": 
"^2.1.1", - "esm-env": "^1.2.1", - "esrap": "^2.1.0", - "is-reference": "^3.0.3", - "locate-character": "^3.0.0", - "magic-string": "^0.30.11", - "zimmerframe": "^1.1.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/svelte-check": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.1.tgz", - "integrity": "sha512-lkh8gff5gpHLjxIV+IaApMxQhTGnir2pNUAqcNgeKkvK5bT/30Ey/nzBxNLDlkztCH4dP7PixkMt9SWEKFPBWg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.25", - "chokidar": "^4.0.1", - "fdir": "^6.2.0", - "picocolors": "^1.0.0", - "sade": "^1.7.4" - }, - "bin": { - "svelte-check": "bin/svelte-check" - }, - "engines": { - "node": ">= 18.0.0" - }, - "peerDependencies": { - "svelte": "^4.0.0 || ^5.0.0-next.0", - "typescript": ">=5.0.0" - } - }, - "node_modules/svelte-check/node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" - }, - "engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/svelte-check/node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/svelte/node_modules/aria-query": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", - "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/svelte/node_modules/axobject-query": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", - "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/swagger-schema-official": { - "version": "2.0.0-bab6bed", - "dev": true, - "license": "ISC" - }, - "node_modules/swagger-typescript-api": { - "version": "13.2.7", - "resolved": "https://registry.npmjs.org/swagger-typescript-api/-/swagger-typescript-api-13.2.7.tgz", - "integrity": "sha512-rfqqoRFpZJPl477M/snMJPM90EvI8WqhuUHSF5ecC2r/w376T29+QXNJFVPsJmbFu5rBc/8m3vhArtMctjONdw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@biomejs/js-api": "1.0.0", - "@biomejs/wasm-nodejs": "2.0.5", - "@types/swagger-schema-official": "^2.0.25", - "c12": "^3.0.4", - "citty": "^0.1.6", - "consola": "^3.4.2", - "eta": "^2.2.0", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21", - "nanoid": "^5.1.5", - "swagger-schema-official": "2.0.0-bab6bed", - "swagger2openapi": "^7.0.8", - "typescript": "~5.8.3" - }, - "bin": { - "sta": "dist/cli.js", - "swagger-typescript-api": "dist/cli.js" - }, - "engines": { - "node": ">=20" - } - }, - "node_modules/swagger-typescript-api/node_modules/consola": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", 
- "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/swagger-typescript-api/node_modules/nanoid": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.5.tgz", - "integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.js" - }, - "engines": { - "node": "^18 || >=20" - } - }, - "node_modules/swagger-typescript-api/node_modules/typescript": { - "version": "5.8.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", - "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/swagger2openapi": { - "version": "7.0.8", - "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", - "integrity": "sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "call-me-maybe": "^1.0.1", - "node-fetch": "^2.6.1", - "node-fetch-h2": "^2.3.0", - "node-readfiles": "^0.2.0", - "oas-kit-common": "^1.0.8", - "oas-resolver": "^2.5.6", - "oas-schema-walker": "^1.1.5", - "oas-validator": "^5.0.8", - "reftools": "^1.1.9", - "yaml": "^1.10.0", - "yargs": "^17.0.1" - }, - "bin": { - "boast": "boast.js", - "oas-validate": "oas-validate.js", - "swagger2openapi": "swagger2openapi.js" - }, - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/swagger2openapi/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/swagger2openapi/node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/swagger2openapi/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/swagger2openapi/node_modules/oas-resolver": { - "version": "2.5.6", - "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", - "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", - 
"dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "node-fetch-h2": "^2.3.0", - "oas-kit-common": "^1.0.8", - "reftools": "^1.1.9", - "yaml": "^1.10.0", - "yargs": "^17.0.1" - }, - "bin": { - "resolve": "resolve.js" - }, - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/swagger2openapi/node_modules/oas-validator": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", - "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "call-me-maybe": "^1.0.1", - "oas-kit-common": "^1.0.8", - "oas-linter": "^3.2.2", - "oas-resolver": "^2.5.6", - "oas-schema-walker": "^1.1.5", - "reftools": "^1.1.9", - "should": "^13.2.1", - "yaml": "^1.10.0" - }, - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/swagger2openapi/node_modules/reftools": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", - "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==", - "dev": true, - "license": "BSD-3-Clause", - "funding": { - "url": "https://github.com/Mermade/oas-kit?sponsor=1" - } - }, - "node_modules/swagger2openapi/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/swagger2openapi/node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/swagger2openapi/node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">= 6" - } - }, - "node_modules/swagger2openapi/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/swagger2openapi/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/symbol-tree": { - "version": "3.2.4", - "resolved": 
"https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", - "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", - "dev": true, - "license": "MIT" - }, - "node_modules/tailwindcss": { - "version": "4.1.11", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.11.tgz", - "integrity": "sha512-2E9TBm6MDD/xKYe+dvJZAmg3yxIEDNRc0jwlNyDg/4Fil2QcSLjFKGVff0lAf1jjeaArlG/M75Ey/EYr/OJtBA==", - "license": "MIT" - }, - "node_modules/tapable": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.2.tgz", - "integrity": "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", - "dev": true, - "license": "ISC", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/tinybench": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", - "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", - "dev": true, - "license": "MIT" - }, - "node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/tinyglobby": { - "version": "0.2.14", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", - "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/SuperchupuDev" - } - }, - "node_modules/tinypool": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", - "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, - "node_modules/tinyrainbow": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", - "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tinyspy": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", - "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, 
- "node_modules/tldts": { - "version": "6.1.86", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz", - "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "tldts-core": "^6.1.86" - }, - "bin": { - "tldts": "bin/cli.js" - } - }, - "node_modules/tldts-core": { - "version": "6.1.86", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz", - "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==", - "dev": true, - "license": "MIT" - }, - "node_modules/token-types": { - "version": "6.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@tokenizer/token": "^0.3.0", - "ieee754": "^1.2.1" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, - "node_modules/tough-cookie": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz", - "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "tldts": "^6.1.32" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/tree-kill": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "bin": { - "tree-kill": "cli.js" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, - "license": "0BSD" - }, - "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typescript": { - "version": "5.9.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", - "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/uid": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", - "integrity": "sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@lukeed/csprng": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/uint8array-extras": { - "version": "1.4.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/undici-types": { - "version": "7.10.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", - "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", - "dev": true, - "license": "MIT" - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "license": "MIT" - }, - "node_modules/vite": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.1.tgz", - "integrity": "sha512-yJ+Mp7OyV+4S+afWo+QyoL9jFWD11QFH0i5i7JypnfTcA1rmgxCbiA8WwAICDEtZ1Z1hzrVhN8R8rGTqkTY8ZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.4.6", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.14" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/vite-node": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", - "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.4.1", - "es-module-lexer": "^1.7.0", - "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite/node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.46.2.tgz", - "integrity": "sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-android-arm64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.46.2.tgz", - "integrity": "sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.46.2.tgz", - "integrity": "sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==", - "cpu": [ - "arm64" 
- ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-darwin-x64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.46.2.tgz", - "integrity": "sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.46.2.tgz", - "integrity": "sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.46.2.tgz", - "integrity": "sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.46.2.tgz", - "integrity": "sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.46.2.tgz", - "integrity": "sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.46.2.tgz", - "integrity": "sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.46.2.tgz", - "integrity": "sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.46.2.tgz", - "integrity": "sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - 
"node_modules/vite/node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.46.2.tgz", - "integrity": "sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.46.2.tgz", - "integrity": "sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.46.2.tgz", - "integrity": "sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.46.2.tgz", - "integrity": "sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.46.2.tgz", - "integrity": "sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.46.2.tgz", - "integrity": "sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.46.2.tgz", - "integrity": "sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/vite/node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.46.2.tgz", - "integrity": "sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/vite/node_modules/fsevents": { - "version": "2.3.3", - "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/vite/node_modules/rollup": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", - "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.46.2", - "@rollup/rollup-android-arm64": "4.46.2", - "@rollup/rollup-darwin-arm64": "4.46.2", - "@rollup/rollup-darwin-x64": "4.46.2", - "@rollup/rollup-freebsd-arm64": "4.46.2", - "@rollup/rollup-freebsd-x64": "4.46.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", - "@rollup/rollup-linux-arm-musleabihf": "4.46.2", - "@rollup/rollup-linux-arm64-gnu": "4.46.2", - "@rollup/rollup-linux-arm64-musl": "4.46.2", - "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", - "@rollup/rollup-linux-ppc64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-musl": "4.46.2", - "@rollup/rollup-linux-s390x-gnu": "4.46.2", - "@rollup/rollup-linux-x64-gnu": "4.46.2", - "@rollup/rollup-linux-x64-musl": "4.46.2", - "@rollup/rollup-win32-arm64-msvc": "4.46.2", - "@rollup/rollup-win32-ia32-msvc": "4.46.2", - "@rollup/rollup-win32-x64-msvc": "4.46.2", - "fsevents": "~2.3.2" - } - }, - "node_modules/vitefu": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", - "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", - "dev": true, - "license": "MIT", - "workspaces": [ - "tests/deps/*", - "tests/projects/*", - "tests/projects/workspace/packages/*" - ], - "peerDependencies": { - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" - }, - "peerDependenciesMeta": { - "vite": { - "optional": true - } - } - }, - "node_modules/vitest": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", - "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/chai": "^5.2.2", - "@vitest/expect": "3.2.4", - "@vitest/mocker": "3.2.4", - "@vitest/pretty-format": "^3.2.4", - "@vitest/runner": "3.2.4", - "@vitest/snapshot": "3.2.4", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "debug": "^4.4.1", - "expect-type": "^1.2.1", - "magic-string": "^0.30.17", - "pathe": "^2.0.3", - "picomatch": "^4.0.2", - "std-env": "^3.9.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.2", - "tinyglobby": "^0.2.14", - "tinypool": "^1.1.1", - "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", - "vite-node": "3.2.4", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" - }, - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/debug": "^4.1.12", - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - 
"@vitest/browser": "3.2.4", - "@vitest/ui": "3.2.4", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/debug": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - } - } - }, - "node_modules/w3c-xmlserializer": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", - "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "xml-name-validator": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/wcwidth": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "defaults": "^1.0.3" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-encoding": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", - "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "iconv-lite": "0.6.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/whatwg-mimetype": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", - "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/why-is-node-running": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", - "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", - "dev": true, - "license": "MIT", - "dependencies": { - "siginfo": "^2.0.0", - "stackback": "0.0.2" - }, - "bin": { - "why-is-node-running": "cli.js" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/ws": { - "version": "8.18.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", - "integrity": 
"sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/xml-name-validator": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", - "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/xmlchars": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", - "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", - "dev": true, - "license": "MIT" - }, - "node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/zimmerframe": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.2.tgz", - "integrity": "sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==", - "dev": true, - "license": "MIT" - } - } -} diff --git a/webapp/package.json b/webapp/package.json deleted file mode 100644 index cdaaca9c..00000000 --- a/webapp/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "garm-webapp", - "version": "0.0.1", - "private": true, - "scripts": { - "dev": "NODE_ENV=development vite dev --host 0.0.0.0 --port 5173", - "build": "vite build", - "preview": "vite preview", - "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", - "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", - "test": "vitest", - "test:run": "vitest run", - "test:ui": "vitest --ui" - }, - "devDependencies": { - "@openapitools/openapi-generator-cli": "^2.22.0", - "@playwright/test": "^1.54.2", - "@sveltejs/adapter-static": "^3.0.1", - "@sveltejs/kit": "^2.0.0", - "@sveltejs/vite-plugin-svelte": "^6.1.0", - "@tailwindcss/forms": "^0.5.7", - "@tailwindcss/postcss": "^4.1.11", - "@testing-library/jest-dom": "^6.7.0", - "@testing-library/svelte": "^5.2.0-next.3", - "@testing-library/user-event": "^14.6.1", - "@types/node": "^24.2.0", - "@vitest/ui": "^3.2.4", - "autoprefixer": "^10.4.16", - "happy-dom": "^18.0.1", - "jsdom": "^26.1.0", - "postcss": "^8.4.32", - "svelte": "^5.38.0", - "svelte-check": "^4.3.1", - "swagger-typescript-api": "^13.2.7", - "tailwindcss": "^4.1.11", - "typescript": "^5.0.0", - "vite": "^7.1.1", - "vitest": "^3.2.4" - }, - "type": "module", - "dependencies": { - "@codemirror/lang-json": "^6.0.2", - "@codemirror/state": "^6.5.2", - "@codemirror/theme-one-dark": "^6.1.3", - "@codemirror/view": "^6.38.1", - "@tailwindcss/typography": "^0.5.10", - "codemirror": "^6.0.2", - "cookie": "^0.7.0" - }, - "overrides": { - "cookie": "^0.7.0" - }, - "description": "", - "main": "postcss.config.js", - "keywords": [], - "author": "", - "license": "ISC" -} diff --git a/webapp/postcss.config.js b/webapp/postcss.config.js deleted file mode 100644 index 571e3e11..00000000 --- 
a/webapp/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -export default { - plugins: { - '@tailwindcss/postcss': {}, - autoprefixer: {} - } -}; \ No newline at end of file diff --git a/webapp/src/app.css b/webapp/src/app.css deleted file mode 100644 index 5b1d339f..00000000 --- a/webapp/src/app.css +++ /dev/null @@ -1,18 +0,0 @@ -@import "tailwindcss"; - -@theme { - --breakpoint-sm: 640px; - --breakpoint-md: 768px; - --breakpoint-lg: 1024px; - --breakpoint-xl: 1280px; - --breakpoint-2xl: 1536px; -} - -/* Configure dark mode to use class strategy in Tailwind v4 */ -@variant dark (.dark &); - -@layer base { - html { - font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif; - } -} \ No newline at end of file diff --git a/webapp/src/app.d.ts b/webapp/src/app.d.ts deleted file mode 100644 index f451d6bf..00000000 --- a/webapp/src/app.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -declare global { - namespace App { - interface Error {} - interface Locals {} - interface PageData {} - interface Platform {} - } -} - -export {}; \ No newline at end of file diff --git a/webapp/src/app.html b/webapp/src/app.html deleted file mode 100644 index 44b27308..00000000 --- a/webapp/src/app.html +++ /dev/null @@ -1,78 +0,0 @@ - - - - - - - - - - - - - %sveltekit.head% - - -
                %sveltekit.body%
                - - diff --git a/webapp/src/integration/pool-creation-anti-duplication.test.ts b/webapp/src/integration/pool-creation-anti-duplication.test.ts deleted file mode 100644 index 126ddcad..00000000 --- a/webapp/src/integration/pool-creation-anti-duplication.test.ts +++ /dev/null @@ -1,200 +0,0 @@ -/** - * Integration tests to prevent duplicate pool creation issue - * - * These tests verify that: - * 1. Entity detail pages don't make duplicate API calls - * 2. Global pools page handles creation correctly - * 3. The conditional logic in CreatePoolModal works as expected - */ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// Core test: Verify the conditional logic exists and works -describe('Pool Creation Anti-Duplication Integration', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('Conditional Logic Verification', () => { - it('should have conditional API call logic based on initialEntityType', async () => { - // Mock the CreatePoolModal to test its conditional logic - const mockCreatePoolModal = await import('$lib/components/CreatePoolModal.svelte'); - - // This test verifies that the modal component has the logic to decide - // whether to make API calls or let parent components handle them - expect(mockCreatePoolModal).toBeDefined(); - }); - - it('should prevent duplicate pool creation through architecture', () => { - // The architecture prevents duplication by: - // 1. Entity pages: Parent handles API calls, modal just validates and dispatches - // 2. Global page: Modal handles API calls since no parent entity context - - const scenarios = [ - { - name: 'Repository detail page', - hasInitialEntity: true, - expectedAPICallLocation: 'parent' - }, - { - name: 'Organization detail page', - hasInitialEntity: true, - expectedAPICallLocation: 'parent' - }, - { - name: 'Enterprise detail page', - hasInitialEntity: true, - expectedAPICallLocation: 'parent' - }, - { - name: 'Global pools page', - hasInitialEntity: false, - expectedAPICallLocation: 'modal' - } - ]; - - scenarios.forEach(scenario => { - if (scenario.hasInitialEntity) { - // Entity pages: Modal should NOT make API calls - expect(scenario.expectedAPICallLocation).toBe('parent'); - } else { - // Global page: Modal SHOULD make API calls - expect(scenario.expectedAPICallLocation).toBe('modal'); - } - }); - }); - }); - - describe('API Call Prevention Rules', () => { - it('should follow the rule: one source of truth per scenario', () => { - const rules = { - 'entity-detail-page': { - modalMakesAPICall: false, - parentMakesAPICall: true, - reason: 'Entity is pre-known, parent handles creation' - }, - 'global-pools-page': { - modalMakesAPICall: true, - parentMakesAPICall: false, - reason: 'No pre-selected entity, modal handles everything' - } - }; - - // Verify rules are consistent - Object.values(rules).forEach(rule => { - // Each scenario should have exactly one source making API calls - const apiCallSources = [rule.modalMakesAPICall, rule.parentMakesAPICall]; - const activeSourcesCount = apiCallSources.filter(Boolean).length; - expect(activeSourcesCount).toBe(1); - }); - }); - - it('should prevent race conditions through sequential handling', () => { - // The fix ensures: - // 1. Only one component makes the API call - // 2. Success/error handling is centralized - // 3. 
No race conditions between modal and parent - - const preventionMechanisms = { - conditionalAPICall: 'Modal checks initialEntityType props', - singleSubmitEvent: 'Only one submit event dispatched', - clearResponsibility: 'Each component has defined role' - }; - - Object.entries(preventionMechanisms).forEach(([mechanism, description]) => { - expect(description).toContain(mechanism === 'conditionalAPICall' ? 'Modal checks' : 'one'); - }); - }); - }); - - describe('Error Handling Consistency', () => { - it('should handle errors appropriately per scenario', () => { - const errorHandling = { - 'entity-page-api-error': { - handledBy: 'parent', - action: 'show toast, keep modal open', - apiCallMadeBy: 'parent' - }, - 'global-page-api-error': { - handledBy: 'modal', - action: 'show error in modal', - apiCallMadeBy: 'modal' - } - }; - - Object.entries(errorHandling).forEach(([scenario, handling]) => { - // Error should be handled by the same component that made the API call - expect(handling.handledBy).toBe(handling.apiCallMadeBy); - }); - }); - }); - - describe('Regression Prevention', () => { - it('should prevent the specific duplicate issue that was fixed', () => { - // The original bug: Both modal AND parent were calling createRepositoryPool - // The fix: Only parent calls API when initialEntityType is provided - - const originalBug = { - description: 'Both modal and parent called createRepositoryPool', - symptoms: 'Two identical pools created', - rootCause: 'No conditional logic in modal submission' - }; - - const fix = { - description: 'Conditional API calls based on initialEntityType', - prevention: 'Only one component makes API call per scenario', - verification: 'Unit tests verify API call counts' - }; - - // Verify the fix addresses the root cause - expect(fix.description).toContain('Conditional'); - expect(fix.prevention).toContain('one component'); - expect(originalBug.rootCause).toContain('No conditional logic'); - }); - - it('should maintain backward compatibility', () => { - // The fix should not break existing functionality - const compatibility = { - globalPoolsPage: 'Still works, modal handles creation', - entityDetailPages: 'Still works, parent handles creation', - modalInterface: 'Still works with same props and events', - apiInterface: 'Still works with same API calls, just different caller' - }; - - Object.values(compatibility).forEach(requirement => { - expect(requirement).toContain('works'); - }); - }); - }); - - describe('Future Duplication Prevention', () => { - it('should have clear patterns for adding new entity types', () => { - // When adding new entity types, developers should follow: - const patterns = { - modalLogic: 'Add new case to entity type switch statement', - parentHandler: 'Create handleCreatePool function in parent', - conditionalCheck: 'Use initialEntityType to determine API caller', - errorHandling: 'Handle errors in the component making API call' - }; - - // These patterns prevent accidental duplication - Object.values(patterns).forEach(pattern => { - expect(pattern).toBeDefined(); - }); - }); - - it('should make it easy to identify API call responsibility', () => { - // Clear responsibility matrix - const responsibilities = { - 'CreatePoolModal with initialEntityType': 'Validate form, dispatch event', - 'CreatePoolModal without initialEntityType': 'Validate form, make API call', - 'Parent with CreatePoolModal (entity page)': 'Handle API call and success/error', - 'Parent with CreatePoolModal (global page)': 'Handle success message only' - }; - - // Each 
scenario has clear responsibility - Object.values(responsibilities).forEach(responsibility => { - expect(responsibility).toMatch(/^(Validate|Handle|dispatch)/); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/lib/api/client.ts b/webapp/src/lib/api/client.ts deleted file mode 100644 index 4ceea8df..00000000 --- a/webapp/src/lib/api/client.ts +++ /dev/null @@ -1,77 +0,0 @@ -// Importing from the generated client wrapper -import { - GeneratedGarmApiClient, - type Repository, - type Organization, - type Enterprise, - type Endpoint, - type Pool, - type ScaleSet, - type Instance, - type ForgeCredentials, - type Provider, - type ControllerInfo, - type CreateRepoParams, - type CreateOrgParams, - type CreateEnterpriseParams, - type CreatePoolParams, - type CreateScaleSetParams, - type UpdateEntityParams, - type UpdatePoolParams, - type LoginRequest, - type LoginResponse, -} from './generated-client.js'; - -// Import endpoint and credentials types directly -import type { - CreateGithubEndpointParams as CreateEndpointParams, - UpdateGithubEndpointParams as UpdateEndpointParams, - CreateGithubCredentialsParams as CreateCredentialsParams, - UpdateGithubCredentialsParams as UpdateCredentialsParams, -} from './generated/api'; - -// Re-export types for compatibility -export type { - Repository, - Organization, - Enterprise, - Endpoint, - Pool, - ScaleSet, - Instance, - ForgeCredentials, - Provider, - ControllerInfo, - CreateRepoParams, - CreateOrgParams, - CreateEnterpriseParams, - CreateEndpointParams, - UpdateEndpointParams, - CreateCredentialsParams, - UpdateCredentialsParams, - CreatePoolParams, - CreateScaleSetParams, - UpdateEntityParams, - UpdatePoolParams, - LoginRequest, - LoginResponse, -}; - -// Legacy APIError type for backward compatibility -export interface APIError { - error: string; - details?: string; -} - -// GarmApiClient now extends/wraps the generated client -export class GarmApiClient extends GeneratedGarmApiClient { - constructor(baseUrl: string = '') { - super(baseUrl); - } - - // All methods are inherited from GeneratedGarmApiClient - // This class now acts as a simple wrapper for backward compatibility -} - -// Create a singleton instance -export const garmApi = new GarmApiClient(); diff --git a/webapp/src/lib/api/generated-client.ts b/webapp/src/lib/api/generated-client.ts deleted file mode 100644 index ce45d28b..00000000 --- a/webapp/src/lib/api/generated-client.ts +++ /dev/null @@ -1,596 +0,0 @@ -// Generated API Client Wrapper for GARM -// This wraps the auto-generated OpenAPI client to match our existing interface - -import { - LoginApi, - ControllerInfoApi, - ControllerApi, - EndpointsApi, - CredentialsApi, - RepositoriesApi, - OrganizationsApi, - EnterprisesApi, - PoolsApi, - ScalesetsApi, - InstancesApi, - ProvidersApi, - FirstRunApi, - HooksApi, - type Repository, - type Organization, - type Enterprise, - type ForgeEndpoint, - type Pool, - type ScaleSet, - type Instance, - type ForgeCredentials, - type Provider, - type ControllerInfo, - type CreateRepoParams, - type CreateOrgParams, - type CreateEnterpriseParams, - type CreateGithubEndpointParams, - type CreateGiteaEndpointParams, - type UpdateGithubEndpointParams, - type UpdateGiteaEndpointParams, - type CreateGithubCredentialsParams, - type CreateGiteaCredentialsParams, - type UpdateGithubCredentialsParams, - type UpdateGiteaCredentialsParams, - type CreatePoolParams, - type CreateScaleSetParams, - type UpdateEntityParams, - type UpdatePoolParams, - type PasswordLoginParams, - type 
JWTResponse, - type NewUserParams, - type User, - type UpdateControllerParams, - type HookInfo, - Configuration -} from './generated/index'; - -// Re-export types for compatibility -export type { - Repository, - Organization, - Enterprise, - ForgeEndpoint as Endpoint, - Pool, - ScaleSet, - Instance, - ForgeCredentials, - Provider, - ControllerInfo, - CreateRepoParams, - CreateOrgParams, - CreateEnterpriseParams, - CreateGithubEndpointParams as CreateEndpointParams, - UpdateGithubEndpointParams as UpdateEndpointParams, - CreateGithubCredentialsParams as CreateCredentialsParams, - UpdateGithubCredentialsParams as UpdateCredentialsParams, - CreatePoolParams, - CreateScaleSetParams, - UpdateEntityParams, - UpdatePoolParams, - PasswordLoginParams, - JWTResponse, - NewUserParams, - User, - UpdateControllerParams, -}; - -// Define common request types for compatibility -export interface LoginRequest { - username: string; - password: string; -} - -export interface LoginResponse { - token: string; -} - -export class GeneratedGarmApiClient { - private baseUrl: string; - private token?: string; - private config: Configuration; - - // Check if we're in development mode (cross-origin setup) - private isDevelopmentMode(): boolean { - if (typeof window === 'undefined') return false; - // Development mode: either VITE_GARM_API_URL is set OR we detect cross-origin - return !!(import.meta.env.VITE_GARM_API_URL) || window.location.port === '5173'; - } - - // Generated API client instances - private loginApi: LoginApi; - private controllerInfoApi: ControllerInfoApi; - private controllerApi: ControllerApi; - private endpointsApi: EndpointsApi; - private credentialsApi: CredentialsApi; - private repositoriesApi: RepositoriesApi; - private organizationsApi: OrganizationsApi; - private enterprisesApi: EnterprisesApi; - private poolsApi: PoolsApi; - private scaleSetsApi: ScalesetsApi; - private instancesApi: InstancesApi; - private providersApi: ProvidersApi; - private firstRunApi: FirstRunApi; - private hooksApi: HooksApi; - - constructor(baseUrl: string = '') { - this.baseUrl = baseUrl || window.location.origin; - - // Create configuration for the generated client - const isDevMode = this.isDevelopmentMode(); - this.config = new Configuration({ - basePath: `${this.baseUrl}/api/v1`, - accessToken: () => this.token || '', - baseOptions: { - // In development mode, don't send cookies (use Bearer token only) - // In production mode, include cookies for authentication - withCredentials: !isDevMode, - }, - }); - - // Initialize generated API clients - this.loginApi = new LoginApi(this.config); - this.controllerInfoApi = new ControllerInfoApi(this.config); - this.controllerApi = new ControllerApi(this.config); - this.endpointsApi = new EndpointsApi(this.config); - this.credentialsApi = new CredentialsApi(this.config); - this.repositoriesApi = new RepositoriesApi(this.config); - this.organizationsApi = new OrganizationsApi(this.config); - this.enterprisesApi = new EnterprisesApi(this.config); - this.poolsApi = new PoolsApi(this.config); - this.scaleSetsApi = new ScalesetsApi(this.config); - this.instancesApi = new InstancesApi(this.config); - this.providersApi = new ProvidersApi(this.config); - this.firstRunApi = new FirstRunApi(this.config); - this.hooksApi = new HooksApi(this.config); - } - - // Set authentication token - setToken(token: string) { - this.token = token; - - // Update configuration for all clients - const isDevMode = this.isDevelopmentMode(); - this.config = new Configuration({ - basePath: 
`${this.baseUrl}/api/v1`, - accessToken: () => token, - baseOptions: { - // In development mode, don't send cookies (use Bearer token only) - // In production mode, include cookies for authentication - withCredentials: !isDevMode, - }, - }); - - // Recreate all API instances with new config - this.loginApi = new LoginApi(this.config); - this.controllerInfoApi = new ControllerInfoApi(this.config); - this.controllerApi = new ControllerApi(this.config); - this.endpointsApi = new EndpointsApi(this.config); - this.credentialsApi = new CredentialsApi(this.config); - this.repositoriesApi = new RepositoriesApi(this.config); - this.organizationsApi = new OrganizationsApi(this.config); - this.enterprisesApi = new EnterprisesApi(this.config); - this.poolsApi = new PoolsApi(this.config); - this.scaleSetsApi = new ScalesetsApi(this.config); - this.instancesApi = new InstancesApi(this.config); - this.providersApi = new ProvidersApi(this.config); - this.firstRunApi = new FirstRunApi(this.config); - this.hooksApi = new HooksApi(this.config); - } - - // Authentication - async login(credentials: LoginRequest): Promise { - const params: PasswordLoginParams = { - username: credentials.username, - password: credentials.password, - }; - const response = await this.loginApi.login(params); - const token = response.data.token; - if (token) { - this.setToken(token); - return { token }; - } - throw new Error('Login failed'); - } - - async getControllerInfo(): Promise { - const response = await this.controllerInfoApi.controllerInfo(); - return response.data; - } - - // GitHub Endpoints - async listGithubEndpoints(): Promise { - const response = await this.endpointsApi.listGithubEndpoints(); - return response.data || []; - } - - async getGithubEndpoint(name: string): Promise { - const response = await this.endpointsApi.getGithubEndpoint(name); - return response.data; - } - - async createGithubEndpoint(params: CreateGithubEndpointParams): Promise { - const response = await this.endpointsApi.createGithubEndpoint(params); - return response.data; - } - - async updateGithubEndpoint(name: string, params: UpdateGithubEndpointParams): Promise { - const response = await this.endpointsApi.updateGithubEndpoint(name, params); - return response.data; - } - - async deleteGithubEndpoint(name: string): Promise { - await this.endpointsApi.deleteGithubEndpoint(name); - } - - // Gitea Endpoints - async listGiteaEndpoints(): Promise { - const response = await this.endpointsApi.listGiteaEndpoints(); - return response.data || []; - } - - async getGiteaEndpoint(name: string): Promise { - const response = await this.endpointsApi.getGiteaEndpoint(name); - return response.data; - } - - async createGiteaEndpoint(params: CreateGiteaEndpointParams): Promise { - const response = await this.endpointsApi.createGiteaEndpoint(params); - return response.data; - } - - async updateGiteaEndpoint(name: string, params: UpdateGiteaEndpointParams): Promise { - const response = await this.endpointsApi.updateGiteaEndpoint(name, params); - return response.data; - } - - async deleteGiteaEndpoint(name: string): Promise { - await this.endpointsApi.deleteGiteaEndpoint(name); - } - - // Combined Endpoints helper - async listAllEndpoints(): Promise { - const [githubEndpoints, giteaEndpoints] = await Promise.all([ - this.listGithubEndpoints().catch(() => []), - this.listGiteaEndpoints().catch(() => []) - ]); - - return [ - ...githubEndpoints.map(ep => ({ ...ep, endpoint_type: 'github' as const })), - ...giteaEndpoints.map(ep => ({ ...ep, endpoint_type: 'gitea' as const 
})) - ]; - } - - // GitHub Credentials - async listGithubCredentials(): Promise { - const response = await this.credentialsApi.listCredentials(); - return response.data || []; - } - - async getGithubCredentials(id: number): Promise { - const response = await this.credentialsApi.getCredentials(id); - return response.data; - } - - async createGithubCredentials(params: CreateGithubCredentialsParams): Promise { - const response = await this.credentialsApi.createCredentials(params); - return response.data; - } - - async updateGithubCredentials(id: number, params: UpdateGithubCredentialsParams): Promise { - const response = await this.credentialsApi.updateCredentials(id, params); - return response.data; - } - - async deleteGithubCredentials(id: number): Promise { - await this.credentialsApi.deleteCredentials(id); - } - - // Gitea Credentials - async listGiteaCredentials(): Promise { - const response = await this.credentialsApi.listGiteaCredentials(); - return response.data || []; - } - - async getGiteaCredentials(id: number): Promise { - const response = await this.credentialsApi.getGiteaCredentials(id); - return response.data; - } - - async createGiteaCredentials(params: CreateGiteaCredentialsParams): Promise { - const response = await this.credentialsApi.createGiteaCredentials(params); - return response.data; - } - - async updateGiteaCredentials(id: number, params: UpdateGiteaCredentialsParams): Promise { - const response = await this.credentialsApi.updateGiteaCredentials(id, params); - return response.data; - } - - async deleteGiteaCredentials(id: number): Promise { - await this.credentialsApi.deleteGiteaCredentials(id); - } - - // Combined Credentials helper - async listAllCredentials(): Promise { - const [githubCredentials, giteaCredentials] = await Promise.all([ - this.listGithubCredentials().catch(() => []), - this.listGiteaCredentials().catch(() => []) - ]); - - return [...githubCredentials, ...giteaCredentials]; - } - - // Repositories - async installRepositoryWebhook(repoId: string, params: any = {}): Promise { - await this.repositoriesApi.installRepoWebhook(repoId, params); - } - - async uninstallRepositoryWebhook(repoId: string): Promise { - await this.hooksApi.uninstallRepoWebhook(repoId); - } - - async getRepositoryWebhookInfo(repoId: string): Promise { - const response = await this.hooksApi.getRepoWebhookInfo(repoId); - return response.data; - } - async listRepositories(): Promise { - const response = await this.repositoriesApi.listRepos(); - return response.data || []; - } - - async getRepository(id: string): Promise { - const response = await this.repositoriesApi.getRepo(id); - return response.data; - } - - async createRepository(params: CreateRepoParams): Promise { - const response = await this.repositoriesApi.createRepo(params); - return response.data; - } - - async updateRepository(id: string, params: UpdateEntityParams): Promise { - const response = await this.repositoriesApi.updateRepo(id, params); - return response.data; - } - - async deleteRepository(id: string): Promise { - await this.repositoriesApi.deleteRepo(id); - } - - async installRepoWebhook(id: string): Promise { - await this.repositoriesApi.installRepoWebhook(id, {}); - } - - async listRepositoryPools(id: string): Promise { - const response = await this.repositoriesApi.listRepoPools(id); - return response.data || []; - } - - async listRepositoryInstances(id: string): Promise { - const response = await this.repositoriesApi.listRepoInstances(id); - return response.data || []; - } - - async createRepositoryPool(id: 
string, params: CreatePoolParams): Promise { - const response = await this.repositoriesApi.createRepoPool(id, params); - return response.data; - } - - // Organizations - async installOrganizationWebhook(orgId: string, params: any = {}): Promise { - await this.organizationsApi.installOrgWebhook(orgId, params); - } - - async uninstallOrganizationWebhook(orgId: string): Promise { - await this.hooksApi.uninstallOrgWebhook(orgId); - } - - async getOrganizationWebhookInfo(orgId: string): Promise { - const response = await this.hooksApi.getOrgWebhookInfo(orgId); - return response.data; - } - async listOrganizations(): Promise { - const response = await this.organizationsApi.listOrgs(); - return response.data || []; - } - - async getOrganization(id: string): Promise { - const response = await this.organizationsApi.getOrg(id); - return response.data; - } - - async createOrganization(params: CreateOrgParams): Promise { - const response = await this.organizationsApi.createOrg(params); - return response.data; - } - - async updateOrganization(id: string, params: UpdateEntityParams): Promise { - const response = await this.organizationsApi.updateOrg(id, params); - return response.data; - } - - async deleteOrganization(id: string): Promise { - await this.organizationsApi.deleteOrg(id); - } - - async listOrganizationPools(id: string): Promise { - const response = await this.organizationsApi.listOrgPools(id); - return response.data || []; - } - - async listOrganizationInstances(id: string): Promise { - const response = await this.organizationsApi.listOrgInstances(id); - return response.data || []; - } - - async createOrganizationPool(id: string, params: CreatePoolParams): Promise { - const response = await this.organizationsApi.createOrgPool(id, params); - return response.data; - } - - // Enterprises - async listEnterprises(): Promise { - const response = await this.enterprisesApi.listEnterprises(); - return response.data || []; - } - - async getEnterprise(id: string): Promise { - const response = await this.enterprisesApi.getEnterprise(id); - return response.data; - } - - async createEnterprise(params: CreateEnterpriseParams): Promise { - const response = await this.enterprisesApi.createEnterprise(params); - return response.data; - } - - async updateEnterprise(id: string, params: UpdateEntityParams): Promise { - const response = await this.enterprisesApi.updateEnterprise(id, params); - return response.data; - } - - async deleteEnterprise(id: string): Promise { - await this.enterprisesApi.deleteEnterprise(id); - } - - async listEnterprisePools(id: string): Promise { - const response = await this.enterprisesApi.listEnterprisePools(id); - return response.data || []; - } - - async listEnterpriseInstances(id: string): Promise { - const response = await this.enterprisesApi.listEnterpriseInstances(id); - return response.data || []; - } - - async createEnterprisePool(id: string, params: CreatePoolParams): Promise { - const response = await this.enterprisesApi.createEnterprisePool(id, params); - return response.data; - } - - // Scale sets for repositories, organizations, and enterprises - async createRepositoryScaleSet(id: string, params: CreateScaleSetParams): Promise { - const response = await this.repositoriesApi.createRepoScaleSet(id, params); - return response.data; - } - - async listRepositoryScaleSets(id: string): Promise { - const response = await this.repositoriesApi.listRepoScaleSets(id); - return response.data || []; - } - - async createOrganizationScaleSet(id: string, params: CreateScaleSetParams): 
Promise { - const response = await this.organizationsApi.createOrgScaleSet(id, params); - return response.data; - } - - async listOrganizationScaleSets(id: string): Promise { - const response = await this.organizationsApi.listOrgScaleSets(id); - return response.data || []; - } - - async createEnterpriseScaleSet(id: string, params: CreateScaleSetParams): Promise { - const response = await this.enterprisesApi.createEnterpriseScaleSet(id, params); - return response.data; - } - - async listEnterpriseScaleSets(id: string): Promise { - const response = await this.enterprisesApi.listEnterpriseScaleSets(id); - return response.data || []; - } - - // Pools - async listPools(): Promise { - const response = await this.poolsApi.listPools(); - return response.data || []; - } - - async listAllPools(): Promise { - return this.listPools(); - } - - async getPool(id: string): Promise { - const response = await this.poolsApi.getPool(id); - return response.data; - } - - async updatePool(id: string, params: UpdatePoolParams): Promise { - const response = await this.poolsApi.updatePool(id, params); - return response.data; - } - - async deletePool(id: string): Promise { - await this.poolsApi.deletePool(id); - } - - // Scale Sets - async listScaleSets(): Promise { - const response = await this.scaleSetsApi.listScalesets(); - return response.data || []; - } - - async getScaleSet(id: number): Promise { - const response = await this.scaleSetsApi.getScaleSet(id.toString()); - return response.data; - } - - async updateScaleSet(id: number, params: Partial): Promise { - const response = await this.scaleSetsApi.updateScaleSet(id.toString(), params); - return response.data; - } - - async deleteScaleSet(id: number): Promise { - await this.scaleSetsApi.deleteScaleSet(id.toString()); - } - - // Instances - async listInstances(): Promise { - const response = await this.instancesApi.listInstances(); - return response.data || []; - } - - async getInstance(name: string): Promise { - const response = await this.instancesApi.getInstance(name); - return response.data; - } - - async deleteInstance(name: string): Promise { - await this.instancesApi.deleteInstance(name); - } - - // Providers - async listProviders(): Promise { - const response = await this.providersApi.listProviders(); - return response.data || []; - } - - // Compatibility aliases - async listCredentials(): Promise { - return this.listAllCredentials(); - } - - async listEndpoints(): Promise { - return this.listAllEndpoints(); - } - - // First-run initialization - async firstRun(params: NewUserParams): Promise { - const response = await this.firstRunApi.firstRun(params); - return response.data; - } - - // Controller management - async updateController(params: UpdateControllerParams): Promise { - const response = await this.controllerApi.updateController(params); - return response.data; - } -} - -// Create a singleton instance -export const generatedGarmApi = new GeneratedGarmApiClient(); \ No newline at end of file diff --git a/webapp/src/lib/api/generated/.gitignore b/webapp/src/lib/api/generated/.gitignore deleted file mode 100644 index 149b5765..00000000 --- a/webapp/src/lib/api/generated/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -wwwroot/*.js -node_modules -typings -dist diff --git a/webapp/src/lib/api/generated/.npmignore b/webapp/src/lib/api/generated/.npmignore deleted file mode 100644 index 999d88df..00000000 --- a/webapp/src/lib/api/generated/.npmignore +++ /dev/null @@ -1 +0,0 @@ -# empty npmignore to ensure all required files (e.g., in the dist folder) are 
published by npm \ No newline at end of file diff --git a/webapp/src/lib/api/generated/.openapi-generator-ignore b/webapp/src/lib/api/generated/.openapi-generator-ignore deleted file mode 100644 index 7484ee59..00000000 --- a/webapp/src/lib/api/generated/.openapi-generator-ignore +++ /dev/null @@ -1,23 +0,0 @@ -# OpenAPI Generator Ignore -# Generated by openapi-generator https://github.com/openapitools/openapi-generator - -# Use this file to prevent files from being overwritten by the generator. -# The patterns follow closely to .gitignore or .dockerignore. - -# As an example, the C# client generator defines ApiClient.cs. -# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: -#ApiClient.cs - -# You can match any string of characters against a directory, file or extension with a single asterisk (*): -#foo/*/qux -# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux - -# You can recursively match patterns against a directory, file or extension with a double asterisk (**): -#foo/**/qux -# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux - -# You can also negate patterns with an exclamation (!). -# For example, you can ignore all files in a docs folder with the file extension .md: -#docs/*.md -# Then explicitly reverse the ignore rule for a single file: -#!docs/README.md diff --git a/webapp/src/lib/api/generated/.openapi-generator/FILES b/webapp/src/lib/api/generated/.openapi-generator/FILES deleted file mode 100644 index 5f89d927..00000000 --- a/webapp/src/lib/api/generated/.openapi-generator/FILES +++ /dev/null @@ -1,70 +0,0 @@ -.gitignore -.npmignore -.openapi-generator-ignore -api.ts -base.ts -common.ts -configuration.ts -docs/APIErrorResponse.md -docs/Address.md -docs/ControllerApi.md -docs/ControllerInfo.md -docs/ControllerInfoApi.md -docs/CreateEnterpriseParams.md -docs/CreateGiteaCredentialsParams.md -docs/CreateGiteaEndpointParams.md -docs/CreateGithubCredentialsParams.md -docs/CreateGithubEndpointParams.md -docs/CreateOrgParams.md -docs/CreatePoolParams.md -docs/CreateRepoParams.md -docs/CreateScaleSetParams.md -docs/CredentialsApi.md -docs/EndpointsApi.md -docs/Enterprise.md -docs/EnterprisesApi.md -docs/EntityEvent.md -docs/FirstRunApi.md -docs/ForgeCredentials.md -docs/ForgeEndpoint.md -docs/ForgeEntity.md -docs/GithubApp.md -docs/GithubPAT.md -docs/GithubRateLimit.md -docs/HookInfo.md -docs/HooksApi.md -docs/InstallWebhookParams.md -docs/Instance.md -docs/InstancesApi.md -docs/JWTResponse.md -docs/Job.md -docs/JobsApi.md -docs/LoginApi.md -docs/MetricsTokenApi.md -docs/NewUserParams.md -docs/Organization.md -docs/OrganizationsApi.md -docs/PasswordLoginParams.md -docs/Pool.md -docs/PoolManagerStatus.md -docs/PoolsApi.md -docs/Provider.md -docs/ProvidersApi.md -docs/RepositoriesApi.md -docs/Repository.md -docs/RunnerPrefix.md -docs/ScaleSet.md -docs/ScalesetsApi.md -docs/StatusMessage.md -docs/Tag.md -docs/UpdateControllerParams.md -docs/UpdateEntityParams.md -docs/UpdateGiteaCredentialsParams.md -docs/UpdateGiteaEndpointParams.md -docs/UpdateGithubCredentialsParams.md -docs/UpdateGithubEndpointParams.md -docs/UpdatePoolParams.md -docs/UpdateScaleSetParams.md -docs/User.md -git_push.sh -index.ts diff --git a/webapp/src/lib/api/generated/.openapi-generator/VERSION b/webapp/src/lib/api/generated/.openapi-generator/VERSION deleted file mode 100644 index e465da43..00000000 --- a/webapp/src/lib/api/generated/.openapi-generator/VERSION +++ /dev/null @@ -1 +0,0 @@ -7.14.0 diff --git 
a/webapp/src/lib/api/generated/api.ts b/webapp/src/lib/api/generated/api.ts deleted file mode 100644 index 41d28ce1..00000000 --- a/webapp/src/lib/api/generated/api.ts +++ /dev/null @@ -1,11684 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * Garm API. - * The Garm API generated using go-swagger. - * - * The version of the OpenAPI document: 1.0.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -import type { Configuration } from './configuration'; -import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios'; -import globalAxios from 'axios'; -// Some imports not used depending on template conditions -// @ts-ignore -import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common'; -import type { RequestArgs } from './base'; -// @ts-ignore -import { BASE_PATH, COLLECTION_FORMATS, BaseAPI, RequiredError, operationServerMap } from './base'; - -/** - * - * @export - * @interface APIErrorResponse - */ -export interface APIErrorResponse { - /** - * - * @type {string} - * @memberof APIErrorResponse - */ - 'details'?: string; - /** - * - * @type {string} - * @memberof APIErrorResponse - */ - 'error'?: string; -} -/** - * - * @export - * @interface Address - */ -export interface Address { - /** - * - * @type {string} - * @memberof Address - */ - 'address'?: string; - /** - * - * @type {string} - * @memberof Address - */ - 'type'?: string; -} -/** - * - * @export - * @interface ControllerInfo - */ -export interface ControllerInfo { - /** - * CallbackURL is the URL where instances can send updates back to the controller. This URL is used by instances to send status updates back to the controller. The URL itself may be made available to instances via a reverse proxy or a load balancer. That means that the user is responsible for telling GARM what the public URL is, by setting this field. - * @type {string} - * @memberof ControllerInfo - */ - 'callback_url'?: string; - /** - * ControllerID is the unique ID of this controller. This ID gets generated automatically on controller init. - * @type {string} - * @memberof ControllerInfo - */ - 'controller_id'?: string; - /** - * ControllerWebhookURL is the controller specific URL where webhooks will be received. This field holds the WebhookURL defined above to which we append the ControllerID. Functionally it is the same as WebhookURL, but it allows us to safely manage webhooks from GARM without accidentally removing webhooks from other services or GARM controllers. - * @type {string} - * @memberof ControllerInfo - */ - 'controller_webhook_url'?: string; - /** - * Hostname is the hostname of the machine that runs this controller. In the future, this field will be migrated to a separate table that will keep track of each the controller nodes that are part of a cluster. This will happen when we implement controller scale-out capability. - * @type {string} - * @memberof ControllerInfo - */ - 'hostname'?: string; - /** - * MetadataURL is the public metadata URL of the GARM instance. This URL is used by instances to fetch information they need to set themselves up. The URL itself may be made available to runners via a reverse proxy or a load balancer. 
That means that the user is responsible for telling GARM what the public URL is, by setting this field. - * @type {string} - * @memberof ControllerInfo - */ - 'metadata_url'?: string; - /** - * MinimumJobAgeBackoff is the minimum time in seconds that a job must be in queued state before GARM will attempt to allocate a runner for it. When set to a non zero value, GARM will ignore the job until the job\'s age is greater than this value. When using the min_idle_runners feature of a pool, this gives enough time for potential idle runners to pick up the job before GARM attempts to allocate a new runner, thus avoiding the need to potentially scale down runners later. - * @type {number} - * @memberof ControllerInfo - */ - 'minimum_job_age_backoff'?: number; - /** - * Version is the version of the GARM controller. - * @type {string} - * @memberof ControllerInfo - */ - 'version'?: string; - /** - * WebhookURL is the base URL where the controller will receive webhooks from github. When webhook management is used, this URL is used as a base to which the controller UUID is appended and which will receive the webhooks. The URL itself may be made available to instances via a reverse proxy or a load balancer. That means that the user is responsible for telling GARM what the public URL is, by setting this field. - * @type {string} - * @memberof ControllerInfo - */ - 'webhook_url'?: string; -} -/** - * - * @export - * @interface CreateEnterpriseParams - */ -export interface CreateEnterpriseParams { - /** - * - * @type {string} - * @memberof CreateEnterpriseParams - */ - 'credentials_name'?: string; - /** - * - * @type {string} - * @memberof CreateEnterpriseParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof CreateEnterpriseParams - */ - 'pool_balancer_type'?: string; - /** - * - * @type {string} - * @memberof CreateEnterpriseParams - */ - 'webhook_secret'?: string; -} -/** - * - * @export - * @interface CreateGiteaCredentialsParams - */ -export interface CreateGiteaCredentialsParams { - /** - * - * @type {GithubApp} - * @memberof CreateGiteaCredentialsParams - */ - 'app'?: GithubApp; - /** - * - * @type {string} - * @memberof CreateGiteaCredentialsParams - */ - 'auth_type'?: string; - /** - * - * @type {string} - * @memberof CreateGiteaCredentialsParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof CreateGiteaCredentialsParams - */ - 'endpoint'?: string; - /** - * - * @type {string} - * @memberof CreateGiteaCredentialsParams - */ - 'name'?: string; - /** - * - * @type {GithubPAT} - * @memberof CreateGiteaCredentialsParams - */ - 'pat'?: GithubPAT; -} -/** - * - * @export - * @interface CreateGiteaEndpointParams - */ -export interface CreateGiteaEndpointParams { - /** - * - * @type {string} - * @memberof CreateGiteaEndpointParams - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof CreateGiteaEndpointParams - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof CreateGiteaEndpointParams - */ - 'ca_cert_bundle'?: Array; - /** - * - * @type {string} - * @memberof CreateGiteaEndpointParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof CreateGiteaEndpointParams - */ - 'name'?: string; -} -/** - * - * @export - * @interface CreateGithubCredentialsParams - */ -export interface CreateGithubCredentialsParams { - /** - * - * @type {GithubApp} - * @memberof CreateGithubCredentialsParams - */ - 'app'?: GithubApp; - /** - * - * @type {string} - * @memberof CreateGithubCredentialsParams - */ - 'auth_type'?: 
string; - /** - * - * @type {string} - * @memberof CreateGithubCredentialsParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof CreateGithubCredentialsParams - */ - 'endpoint'?: string; - /** - * - * @type {string} - * @memberof CreateGithubCredentialsParams - */ - 'name'?: string; - /** - * - * @type {GithubPAT} - * @memberof CreateGithubCredentialsParams - */ - 'pat'?: GithubPAT; -} -/** - * - * @export - * @interface CreateGithubEndpointParams - */ -export interface CreateGithubEndpointParams { - /** - * - * @type {string} - * @memberof CreateGithubEndpointParams - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof CreateGithubEndpointParams - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof CreateGithubEndpointParams - */ - 'ca_cert_bundle'?: Array; - /** - * - * @type {string} - * @memberof CreateGithubEndpointParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof CreateGithubEndpointParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof CreateGithubEndpointParams - */ - 'upload_base_url'?: string; -} -/** - * - * @export - * @interface CreateOrgParams - */ -export interface CreateOrgParams { - /** - * - * @type {string} - * @memberof CreateOrgParams - */ - 'credentials_name'?: string; - /** - * - * @type {string} - * @memberof CreateOrgParams - */ - 'forge_type'?: string; - /** - * - * @type {string} - * @memberof CreateOrgParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof CreateOrgParams - */ - 'pool_balancer_type'?: string; - /** - * - * @type {string} - * @memberof CreateOrgParams - */ - 'webhook_secret'?: string; -} -/** - * - * @export - * @interface CreatePoolParams - */ -export interface CreatePoolParams { - /** - * - * @type {boolean} - * @memberof CreatePoolParams - */ - 'enabled'?: boolean; - /** - * - * @type {object} - * @memberof CreatePoolParams - */ - 'extra_specs'?: object; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'flavor'?: string; - /** - * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
- * @type {string} - * @memberof CreatePoolParams - */ - 'github-runner-group'?: string; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'image'?: string; - /** - * - * @type {number} - * @memberof CreatePoolParams - */ - 'max_runners'?: number; - /** - * - * @type {number} - * @memberof CreatePoolParams - */ - 'min_idle_runners'?: number; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'os_arch'?: string; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'os_type'?: string; - /** - * - * @type {number} - * @memberof CreatePoolParams - */ - 'priority'?: number; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'provider_name'?: string; - /** - * - * @type {number} - * @memberof CreatePoolParams - */ - 'runner_bootstrap_timeout'?: number; - /** - * - * @type {string} - * @memberof CreatePoolParams - */ - 'runner_prefix'?: string; - /** - * - * @type {Array} - * @memberof CreatePoolParams - */ - 'tags'?: Array; -} -/** - * - * @export - * @interface CreateRepoParams - */ -export interface CreateRepoParams { - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'credentials_name'?: string; - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'forge_type'?: string; - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'owner'?: string; - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'pool_balancer_type'?: string; - /** - * - * @type {string} - * @memberof CreateRepoParams - */ - 'webhook_secret'?: string; -} -/** - * - * @export - * @interface CreateScaleSetParams - */ -export interface CreateScaleSetParams { - /** - * - * @type {boolean} - * @memberof CreateScaleSetParams - */ - 'disable_update'?: boolean; - /** - * - * @type {boolean} - * @memberof CreateScaleSetParams - */ - 'enabled'?: boolean; - /** - * - * @type {object} - * @memberof CreateScaleSetParams - */ - 'extra_specs'?: object; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'flavor'?: string; - /** - * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
- * @type {string} - * @memberof CreateScaleSetParams - */ - 'github-runner-group'?: string; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'image'?: string; - /** - * - * @type {number} - * @memberof CreateScaleSetParams - */ - 'max_runners'?: number; - /** - * - * @type {number} - * @memberof CreateScaleSetParams - */ - 'min_idle_runners'?: number; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'os_arch'?: string; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'os_type'?: string; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'provider_name'?: string; - /** - * - * @type {number} - * @memberof CreateScaleSetParams - */ - 'runner_bootstrap_timeout'?: number; - /** - * - * @type {string} - * @memberof CreateScaleSetParams - */ - 'runner_prefix'?: string; - /** - * - * @type {number} - * @memberof CreateScaleSetParams - */ - 'scale_set_id'?: number; - /** - * - * @type {Array} - * @memberof CreateScaleSetParams - */ - 'tags'?: Array; -} -/** - * - * @export - * @interface Enterprise - */ -export interface Enterprise { - /** - * - * @type {string} - * @memberof Enterprise - */ - 'created_at'?: string; - /** - * - * @type {ForgeCredentials} - * @memberof Enterprise - */ - 'credentials'?: ForgeCredentials; - /** - * - * @type {number} - * @memberof Enterprise - */ - 'credentials_id'?: number; - /** - * CredentialName is the name of the credentials associated with the enterprise. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0. - * @type {string} - * @memberof Enterprise - */ - 'credentials_name'?: string; - /** - * - * @type {ForgeEndpoint} - * @memberof Enterprise - */ - 'endpoint'?: ForgeEndpoint; - /** - * - * @type {Array} - * @memberof Enterprise - */ - 'events'?: Array; - /** - * - * @type {string} - * @memberof Enterprise - */ - 'id'?: string; - /** - * - * @type {string} - * @memberof Enterprise - */ - 'name'?: string; - /** - * - * @type {Array} - * @memberof Enterprise - */ - 'pool'?: Array; - /** - * - * @type {string} - * @memberof Enterprise - */ - 'pool_balancing_type'?: string; - /** - * - * @type {PoolManagerStatus} - * @memberof Enterprise - */ - 'pool_manager_status'?: PoolManagerStatus; - /** - * - * @type {string} - * @memberof Enterprise - */ - 'updated_at'?: string; -} -/** - * - * @export - * @interface EntityEvent - */ -export interface EntityEvent { - /** - * - * @type {string} - * @memberof EntityEvent - */ - 'created_at'?: string; - /** - * - * @type {string} - * @memberof EntityEvent - */ - 'event_level'?: string; - /** - * - * @type {string} - * @memberof EntityEvent - */ - 'event_type'?: string; - /** - * - * @type {number} - * @memberof EntityEvent - */ - 'id'?: number; - /** - * - * @type {string} - * @memberof EntityEvent - */ - 'message'?: string; -} -/** - * - * @export - * @interface ForgeCredentials - */ -export interface ForgeCredentials { - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'auth-type'?: string; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof ForgeCredentials - */ - 'ca_bundle'?: Array; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'created_at'?: string; - /** - * - * @type {string} - * @memberof 
ForgeCredentials - */ - 'description'?: string; - /** - * - * @type {ForgeEndpoint} - * @memberof ForgeCredentials - */ - 'endpoint'?: ForgeEndpoint; - /** - * - * @type {Array} - * @memberof ForgeCredentials - */ - 'enterprises'?: Array; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'forge_type'?: string; - /** - * - * @type {number} - * @memberof ForgeCredentials - */ - 'id'?: number; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'name'?: string; - /** - * - * @type {Array} - * @memberof ForgeCredentials - */ - 'organizations'?: Array; - /** - * - * @type {GithubRateLimit} - * @memberof ForgeCredentials - */ - 'rate_limit'?: GithubRateLimit; - /** - * - * @type {Array} - * @memberof ForgeCredentials - */ - 'repositories'?: Array; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'updated_at'?: string; - /** - * - * @type {string} - * @memberof ForgeCredentials - */ - 'upload_base_url'?: string; -} -/** - * - * @export - * @interface ForgeEndpoint - */ -export interface ForgeEndpoint { - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof ForgeEndpoint - */ - 'ca_cert_bundle'?: Array; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'created_at'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'endpoint_type'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'updated_at'?: string; - /** - * - * @type {string} - * @memberof ForgeEndpoint - */ - 'upload_base_url'?: string; -} -/** - * - * @export - * @interface ForgeEntity - */ -export interface ForgeEntity { - /** - * - * @type {number} - * @memberof ForgeEntity - */ - 'agent_id'?: number; - /** - * - * @type {string} - * @memberof ForgeEntity - */ - 'os_name'?: string; - /** - * - * @type {string} - * @memberof ForgeEntity - */ - 'os_version'?: string; -} -/** - * - * @export - * @interface GithubApp - */ -export interface GithubApp { - /** - * - * @type {number} - * @memberof GithubApp - */ - 'app_id'?: number; - /** - * - * @type {number} - * @memberof GithubApp - */ - 'installation_id'?: number; - /** - * - * @type {Array} - * @memberof GithubApp - */ - 'private_key_bytes'?: Array; -} -/** - * - * @export - * @interface GithubPAT - */ -export interface GithubPAT { - /** - * - * @type {string} - * @memberof GithubPAT - */ - 'oauth2_token'?: string; -} -/** - * - * @export - * @interface GithubRateLimit - */ -export interface GithubRateLimit { - /** - * - * @type {number} - * @memberof GithubRateLimit - */ - 'limit'?: number; - /** - * - * @type {number} - * @memberof GithubRateLimit - */ - 'remaining'?: number; - /** - * - * @type {number} - * @memberof GithubRateLimit - */ - 'reset'?: number; - /** - * - * @type {number} - * @memberof GithubRateLimit - */ - 'used'?: number; -} -/** - * - * @export - * @interface HookInfo - */ -export interface HookInfo { - /** - * - * @type {boolean} - * @memberof HookInfo - */ - 'active'?: boolean; - /** - * - * @type {Array} - * @memberof HookInfo - */ - 'events'?: Array; - /** - * - * @type {number} - * @memberof HookInfo - */ - 'id'?: number; - /** - * - * @type {boolean} - * @memberof HookInfo - */ - 'insecure_ssl'?: boolean; - /** - * - * @type 
{string} - * @memberof HookInfo - */ - 'url'?: string; -} -/** - * - * @export - * @interface InstallWebhookParams - */ -export interface InstallWebhookParams { - /** - * - * @type {boolean} - * @memberof InstallWebhookParams - */ - 'insecure_ssl'?: boolean; - /** - * - * @type {string} - * @memberof InstallWebhookParams - */ - 'webhook_endpoint_type'?: string; -} -/** - * - * @export - * @interface Instance - */ -export interface Instance { - /** - * Addresses is a list of IP addresses the provider reports for this instance. - * @type {Array
<Address>} - * @memberof Instance - */ - 'addresses'?: Array<Address>
                ; - /** - * AgentID is the github runner agent ID. - * @type {number} - * @memberof Instance - */ - 'agent_id'?: number; - /** - * CreatedAt is the timestamp of the creation of this runner. - * @type {string} - * @memberof Instance - */ - 'created_at'?: string; - /** - * GithubRunnerGroup is the github runner group to which the runner belongs. The runner group must be created by someone with access to the enterprise. - * @type {string} - * @memberof Instance - */ - 'github-runner-group'?: string; - /** - * ID is the database ID of this instance. - * @type {string} - * @memberof Instance - */ - 'id'?: string; - /** - * - * @type {Job} - * @memberof Instance - */ - 'job'?: Job; - /** - * Name is the name associated with an instance. Depending on the provider, this may or may not be useful in the context of the provider, but we can use it internally to identify the instance. - * @type {string} - * @memberof Instance - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof Instance - */ - 'os_arch'?: string; - /** - * OSName is the name of the OS. Eg: ubuntu, centos, etc. - * @type {string} - * @memberof Instance - */ - 'os_name'?: string; - /** - * - * @type {string} - * @memberof Instance - */ - 'os_type'?: string; - /** - * OSVersion is the version of the operating system. - * @type {string} - * @memberof Instance - */ - 'os_version'?: string; - /** - * PoolID is the ID of the garm pool to which a runner belongs. - * @type {string} - * @memberof Instance - */ - 'pool_id'?: string; - /** - * ProviderFault holds any error messages captured from the IaaS provider that is responsible for managing the lifecycle of the runner. - * @type {Array} - * @memberof Instance - */ - 'provider_fault'?: Array; - /** - * PeoviderID is the unique ID the provider associated with the compute instance. We use this to identify the instance in the provider. - * @type {string} - * @memberof Instance - */ - 'provider_id'?: string; - /** - * ProviderName is the name of the IaaS where the instance was created. - * @type {string} - * @memberof Instance - */ - 'provider_name'?: string; - /** - * - * @type {string} - * @memberof Instance - */ - 'runner_status'?: string; - /** - * ScaleSetID is the ID of the scale set to which a runner belongs. - * @type {number} - * @memberof Instance - */ - 'scale_set_id'?: number; - /** - * - * @type {string} - * @memberof Instance - */ - 'status'?: string; - /** - * StatusMessages is a list of status messages sent back by the runner as it sets itself up. - * @type {Array} - * @memberof Instance - */ - 'status_messages'?: Array; - /** - * UpdatedAt is the timestamp of the last update to this runner. - * @type {string} - * @memberof Instance - */ - 'updated_at'?: string; -} -/** - * JWTResponse holds the JWT token returned as a result of a successful auth - * @export - * @interface JWTResponse - */ -export interface JWTResponse { - /** - * - * @type {string} - * @memberof JWTResponse - */ - 'token'?: string; -} -/** - * - * @export - * @interface Job - */ -export interface Job { - /** - * Action is the specific activity that triggered the event. - * @type {string} - * @memberof Job - */ - 'action'?: string; - /** - * - * @type {string} - * @memberof Job - */ - 'completed_at'?: string; - /** - * Conclusion is the outcome of the job. 
-/**
- *
- * @export
- * @interface Job
- */
-export interface Job {
-    /**
-     * Action is the specific activity that triggered the event.
-     * @type {string}
-     * @memberof Job
-     */
-    'action'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'completed_at'?: string;
-    /**
-     * Conclusion is the outcome of the job. Possible values: \"success\", \"failure\", \"neutral\", \"cancelled\", \"skipped\", \"timed_out\", \"action_required\"
-     * @type {string}
-     * @memberof Job
-     */
-    'conclusion'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'created_at'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'enterprise_id'?: string;
-    /**
-     * ID is the ID of the job.
-     * @type {number}
-     * @memberof Job
-     */
-    'id'?: number;
-    /**
-     *
-     * @type {Array}
-     * @memberof Job
-     */
-    'labels'?: Array;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'locked_by'?: string;
-    /**
-     * Name is the name of the job that was triggered.
-     * @type {string}
-     * @memberof Job
-     */
-    'name'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'org_id'?: string;
-    /**
-     * The entity that received the hook. Webhooks may be configured on the repo, the org and/or the enterprise. If we only configure a repo to use garm, we\'ll only ever receive a webhook from the repo. But if we configure the parent org of the repo and the parent enterprise of the org to use garm, a webhook will be sent for each entity type, in response to one workflow event. Thus, we will get 3 webhooks with the same run_id and job id. Record all involved entities in the same job if we have them configured in garm.
-     * @type {string}
-     * @memberof Job
-     */
-    'repo_id'?: string;
-    /**
-     * Repository in which the job was triggered.
-     * @type {string}
-     * @memberof Job
-     */
-    'repository_name'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'repository_owner'?: string;
-    /**
-     * RunID is the ID of the workflow run. A run may have multiple jobs.
-     * @type {number}
-     * @memberof Job
-     */
-    'run_id'?: number;
-    /**
-     *
-     * @type {number}
-     * @memberof Job
-     */
-    'runner_group_id'?: number;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'runner_group_name'?: string;
-    /**
-     *
-     * @type {number}
-     * @memberof Job
-     */
-    'runner_id'?: number;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'runner_name'?: string;
-    /**
-     * ScaleSetJobID is the job ID when generated for a scale set.
-     * @type {string}
-     * @memberof Job
-     */
-    'scaleset_job_id'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'started_at'?: string;
-    /**
-     * Status is the phase of the lifecycle that the job is currently in. \"queued\", \"in_progress\" and \"completed\".
-     * @type {string}
-     * @memberof Job
-     */
-    'status'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Job
-     */
-    'updated_at'?: string;
-    /**
-     *
-     * @type {number}
-     * @memberof Job
-     */
-    'workflow_job_id'?: number;
-}
-/**
- * NewUserParams holds the needed information to create a new user
- * @export
- * @interface NewUserParams
- */
-export interface NewUserParams {
-    /**
-     *
-     * @type {string}
-     * @memberof NewUserParams
-     */
-    'email'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof NewUserParams
-     */
-    'full_name'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof NewUserParams
-     */
-    'password'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof NewUserParams
-     */
-    'username'?: string;
-}
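Per the RunID doc above, one workflow run may carry several jobs; a client-side sketch (import path hypothetical) that buckets jobs per run:

```typescript
import type { Job } from './generated/api'; // hypothetical path

// Bucket jobs by their workflow run; jobs without a run_id are skipped.
function groupByRun(jobs: Job[]): Map<number, Job[]> {
  const runs = new Map<number, Job[]>();
  for (const job of jobs) {
    if (job.run_id === undefined) continue;
    const bucket = runs.get(job.run_id) ?? [];
    bucket.push(job);
    runs.set(job.run_id, bucket);
  }
  return runs;
}
```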
-/**
- *
- * @export
- * @interface Organization
- */
-export interface Organization {
-    /**
-     *
-     * @type {string}
-     * @memberof Organization
-     */
-    'created_at'?: string;
-    /**
-     *
-     * @type {ForgeCredentials}
-     * @memberof Organization
-     */
-    'credentials'?: ForgeCredentials;
-    /**
-     *
-     * @type {number}
-     * @memberof Organization
-     */
-    'credentials_id'?: number;
-    /**
-     * CredentialName is the name of the credentials associated with the enterprise. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0.
-     * @type {string}
-     * @memberof Organization
-     */
-    'credentials_name'?: string;
-    /**
-     *
-     * @type {ForgeEndpoint}
-     * @memberof Organization
-     */
-    'endpoint'?: ForgeEndpoint;
-    /**
-     *
-     * @type {Array}
-     * @memberof Organization
-     */
-    'events'?: Array;
-    /**
-     *
-     * @type {string}
-     * @memberof Organization
-     */
-    'id'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Organization
-     */
-    'name'?: string;
-    /**
-     *
-     * @type {Array}
-     * @memberof Organization
-     */
-    'pool'?: Array;
-    /**
-     *
-     * @type {string}
-     * @memberof Organization
-     */
-    'pool_balancing_type'?: string;
-    /**
-     *
-     * @type {PoolManagerStatus}
-     * @memberof Organization
-     */
-    'pool_manager_status'?: PoolManagerStatus;
-    /**
-     *
-     * @type {string}
-     * @memberof Organization
-     */
-    'updated_at'?: string;
-}
-/**
- *
- * @export
- * @interface PasswordLoginParams
- */
-export interface PasswordLoginParams {
-    /**
-     *
-     * @type {string}
-     * @memberof PasswordLoginParams
-     */
-    'password'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof PasswordLoginParams
-     */
-    'username'?: string;
-}
-/**
- *
- * @export
- * @interface Pool
- */
-export interface Pool {
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'created_at'?: string;
-    /**
-     *
-     * @type {boolean}
-     * @memberof Pool
-     */
-    'enabled'?: boolean;
-    /**
-     *
-     * @type {ForgeEndpoint}
-     * @memberof Pool
-     */
-    'endpoint'?: ForgeEndpoint;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'enterprise_id'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'enterprise_name'?: string;
-    /**
-     * ExtraSpecs is an opaque raw json that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field mean nothing to garm itself. We don\'t act on the information in this field at all. We only validate that it\'s a proper json.
-     * @type {object}
-     * @memberof Pool
-     */
-    'extra_specs'?: object;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'flavor'?: string;
-    /**
-     * GithubRunnerGroup is the github runner group to which the runners will be added. The runner group must be created by someone with access to the enterprise.
-     * @type {string}
-     * @memberof Pool
-     */
-    'github-runner-group'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'id'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'image'?: string;
-    /**
-     *
-     * @type {Array}
-     * @memberof Pool
-     */
-    'instances'?: Array;
-    /**
-     *
-     * @type {number}
-     * @memberof Pool
-     */
-    'max_runners'?: number;
-    /**
-     *
-     * @type {number}
-     * @memberof Pool
-     */
-    'min_idle_runners'?: number;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'org_id'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'org_name'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'os_arch'?: string;
-    /**
-     *
-     * @type {string}
-     * @memberof Pool
-     */
-    'os_type'?: string;
-    /**
-     * Priority is the priority of the pool. The higher the number, the higher the priority. When fetching matching pools for a set of tags, the result will be sorted in descending order of priority.
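The priority semantics just documented (higher number wins, results sorted descending) can be mirrored client-side; a sketch under the same assumptions as the earlier ones, treating a missing priority as 0:

```typescript
import type { Pool } from './generated/api'; // hypothetical path

// Mirror the server-side ordering: highest priority first.
function sortByPriority(pools: Pool[]): Pool[] {
  return [...pools].sort((a, b) => (b.priority ?? 0) - (a.priority ?? 0));
}
```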
- * @type {number} - * @memberof Pool - */ - 'priority'?: number; - /** - * - * @type {string} - * @memberof Pool - */ - 'provider_name'?: string; - /** - * - * @type {string} - * @memberof Pool - */ - 'repo_id'?: string; - /** - * - * @type {string} - * @memberof Pool - */ - 'repo_name'?: string; - /** - * - * @type {number} - * @memberof Pool - */ - 'runner_bootstrap_timeout'?: number; - /** - * - * @type {string} - * @memberof Pool - */ - 'runner_prefix'?: string; - /** - * - * @type {Array} - * @memberof Pool - */ - 'tags'?: Array; - /** - * - * @type {string} - * @memberof Pool - */ - 'updated_at'?: string; -} -/** - * - * @export - * @interface PoolManagerStatus - */ -export interface PoolManagerStatus { - /** - * - * @type {string} - * @memberof PoolManagerStatus - */ - 'failure_reason'?: string; - /** - * - * @type {boolean} - * @memberof PoolManagerStatus - */ - 'running'?: boolean; -} -/** - * - * @export - * @interface Provider - */ -export interface Provider { - /** - * - * @type {string} - * @memberof Provider - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof Provider - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof Provider - */ - 'type'?: string; -} -/** - * - * @export - * @interface Repository - */ -export interface Repository { - /** - * - * @type {string} - * @memberof Repository - */ - 'created_at'?: string; - /** - * - * @type {ForgeCredentials} - * @memberof Repository - */ - 'credentials'?: ForgeCredentials; - /** - * - * @type {number} - * @memberof Repository - */ - 'credentials_id'?: number; - /** - * CredentialName is the name of the credentials associated with the enterprise. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0. - * @type {string} - * @memberof Repository - */ - 'credentials_name'?: string; - /** - * - * @type {ForgeEndpoint} - * @memberof Repository - */ - 'endpoint'?: ForgeEndpoint; - /** - * - * @type {Array} - * @memberof Repository - */ - 'events'?: Array; - /** - * - * @type {string} - * @memberof Repository - */ - 'id'?: string; - /** - * - * @type {string} - * @memberof Repository - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof Repository - */ - 'owner'?: string; - /** - * - * @type {Array} - * @memberof Repository - */ - 'pool'?: Array; - /** - * - * @type {string} - * @memberof Repository - */ - 'pool_balancing_type'?: string; - /** - * - * @type {PoolManagerStatus} - * @memberof Repository - */ - 'pool_manager_status'?: PoolManagerStatus; - /** - * - * @type {string} - * @memberof Repository - */ - 'updated_at'?: string; -} -/** - * - * @export - * @interface RunnerPrefix - */ -export interface RunnerPrefix { - /** - * - * @type {string} - * @memberof RunnerPrefix - */ - 'runner_prefix'?: string; -} -/** - * - * @export - * @interface ScaleSet - */ -export interface ScaleSet { - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'created_at'?: string; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'desired_runner_count'?: number; - /** - * - * @type {boolean} - * @memberof ScaleSet - */ - 'disable_update'?: boolean; - /** - * - * @type {boolean} - * @memberof ScaleSet - */ - 'enabled'?: boolean; - /** - * - * @type {ForgeEndpoint} - * @memberof ScaleSet - */ - 'endpoint'?: ForgeEndpoint; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'enterprise_id'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'enterprise_name'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 
'extended_state'?: string; - /** - * ExtraSpecs is an opaque raw json that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field means nothing to garm itself. We don\'t act on the information in this field at all. We only validate that it\'s a proper json. - * @type {object} - * @memberof ScaleSet - */ - 'extra_specs'?: object; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'flavor'?: string; - /** - * GithubRunnerGroup is the github runner group in which the runners will be added. The runner group must be created by someone with access to the enterprise. - * @type {string} - * @memberof ScaleSet - */ - 'github-runner-group'?: string; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'id'?: number; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'image'?: string; - /** - * - * @type {Array} - * @memberof ScaleSet - */ - 'instances'?: Array; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'max_runners'?: number; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'min_idle_runners'?: number; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'org_id'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'org_name'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'os_arch'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'os_type'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'provider_name'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'repo_id'?: string; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'repo_name'?: string; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'runner_bootstrap_timeout'?: number; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'runner_prefix'?: string; - /** - * - * @type {number} - * @memberof ScaleSet - */ - 'scale_set_id'?: number; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'state'?: string; - /** - * - * @type {Array} - * @memberof ScaleSet - */ - 'status_messages'?: Array; - /** - * - * @type {string} - * @memberof ScaleSet - */ - 'updated_at'?: string; -} -/** - * - * @export - * @interface StatusMessage - */ -export interface StatusMessage { - /** - * - * @type {string} - * @memberof StatusMessage - */ - 'created_at'?: string; - /** - * - * @type {string} - * @memberof StatusMessage - */ - 'event_level'?: string; - /** - * - * @type {string} - * @memberof StatusMessage - */ - 'event_type'?: string; - /** - * - * @type {string} - * @memberof StatusMessage - */ - 'message'?: string; -} -/** - * - * @export - * @interface Tag - */ -export interface Tag { - /** - * - * @type {string} - * @memberof Tag - */ - 'id'?: string; - /** - * - * @type {string} - * @memberof Tag - */ - 'name'?: string; -} -/** - * - * @export - * @interface UpdateControllerParams - */ -export interface UpdateControllerParams { - /** - * - * @type {string} - * @memberof UpdateControllerParams - */ - 'callback_url'?: string; - /** - * - * @type {string} - * @memberof UpdateControllerParams - */ - 'metadata_url'?: string; - /** - * - * @type {number} - * @memberof UpdateControllerParams - */ - 'minimum_job_age_backoff'?: number; - /** - * - * @type {string} - * @memberof UpdateControllerParams - */ - 'webhook_url'?: string; -} -/** - * - * @export - * @interface UpdateEntityParams - */ -export interface 
UpdateEntityParams { - /** - * - * @type {string} - * @memberof UpdateEntityParams - */ - 'credentials_name'?: string; - /** - * - * @type {string} - * @memberof UpdateEntityParams - */ - 'pool_balancer_type'?: string; - /** - * - * @type {string} - * @memberof UpdateEntityParams - */ - 'webhook_secret'?: string; -} -/** - * - * @export - * @interface UpdateGiteaCredentialsParams - */ -export interface UpdateGiteaCredentialsParams { - /** - * - * @type {string} - * @memberof UpdateGiteaCredentialsParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof UpdateGiteaCredentialsParams - */ - 'name'?: string; - /** - * - * @type {GithubPAT} - * @memberof UpdateGiteaCredentialsParams - */ - 'pat'?: GithubPAT; -} -/** - * - * @export - * @interface UpdateGiteaEndpointParams - */ -export interface UpdateGiteaEndpointParams { - /** - * - * @type {string} - * @memberof UpdateGiteaEndpointParams - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof UpdateGiteaEndpointParams - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof UpdateGiteaEndpointParams - */ - 'ca_cert_bundle'?: Array; - /** - * - * @type {string} - * @memberof UpdateGiteaEndpointParams - */ - 'description'?: string; -} -/** - * - * @export - * @interface UpdateGithubCredentialsParams - */ -export interface UpdateGithubCredentialsParams { - /** - * - * @type {GithubApp} - * @memberof UpdateGithubCredentialsParams - */ - 'app'?: GithubApp; - /** - * - * @type {string} - * @memberof UpdateGithubCredentialsParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof UpdateGithubCredentialsParams - */ - 'name'?: string; - /** - * - * @type {GithubPAT} - * @memberof UpdateGithubCredentialsParams - */ - 'pat'?: GithubPAT; -} -/** - * - * @export - * @interface UpdateGithubEndpointParams - */ -export interface UpdateGithubEndpointParams { - /** - * - * @type {string} - * @memberof UpdateGithubEndpointParams - */ - 'api_base_url'?: string; - /** - * - * @type {string} - * @memberof UpdateGithubEndpointParams - */ - 'base_url'?: string; - /** - * - * @type {Array} - * @memberof UpdateGithubEndpointParams - */ - 'ca_cert_bundle'?: Array; - /** - * - * @type {string} - * @memberof UpdateGithubEndpointParams - */ - 'description'?: string; - /** - * - * @type {string} - * @memberof UpdateGithubEndpointParams - */ - 'upload_base_url'?: string; -} -/** - * - * @export - * @interface UpdatePoolParams - */ -export interface UpdatePoolParams { - /** - * - * @type {boolean} - * @memberof UpdatePoolParams - */ - 'enabled'?: boolean; - /** - * - * @type {object} - * @memberof UpdatePoolParams - */ - 'extra_specs'?: object; - /** - * - * @type {string} - * @memberof UpdatePoolParams - */ - 'flavor'?: string; - /** - * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
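All of UpdatePoolParams' fields (continuing below) are optional, which supports partial updates — a payload presumably only changes the properties it carries. A hypothetical resize-only payload:

```typescript
import type { UpdatePoolParams } from './generated/api'; // hypothetical path

// Every field is optional, so a payload can carry just the properties
// that should change; here, only the pool size limits.
const resizeOnly: UpdatePoolParams = {
  min_idle_runners: 2,
  max_runners: 10,
};
```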
- * @type {string} - * @memberof UpdatePoolParams - */ - 'github-runner-group'?: string; - /** - * - * @type {string} - * @memberof UpdatePoolParams - */ - 'image'?: string; - /** - * - * @type {number} - * @memberof UpdatePoolParams - */ - 'max_runners'?: number; - /** - * - * @type {number} - * @memberof UpdatePoolParams - */ - 'min_idle_runners'?: number; - /** - * - * @type {string} - * @memberof UpdatePoolParams - */ - 'os_arch'?: string; - /** - * - * @type {string} - * @memberof UpdatePoolParams - */ - 'os_type'?: string; - /** - * - * @type {number} - * @memberof UpdatePoolParams - */ - 'priority'?: number; - /** - * - * @type {number} - * @memberof UpdatePoolParams - */ - 'runner_bootstrap_timeout'?: number; - /** - * - * @type {string} - * @memberof UpdatePoolParams - */ - 'runner_prefix'?: string; - /** - * - * @type {Array} - * @memberof UpdatePoolParams - */ - 'tags'?: Array; -} -/** - * - * @export - * @interface UpdateScaleSetParams - */ -export interface UpdateScaleSetParams { - /** - * - * @type {boolean} - * @memberof UpdateScaleSetParams - */ - 'enabled'?: boolean; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'extended_state'?: string; - /** - * - * @type {object} - * @memberof UpdateScaleSetParams - */ - 'extra_specs'?: object; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'flavor'?: string; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'image'?: string; - /** - * - * @type {number} - * @memberof UpdateScaleSetParams - */ - 'max_runners'?: number; - /** - * - * @type {number} - * @memberof UpdateScaleSetParams - */ - 'min_idle_runners'?: number; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'name'?: string; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'os_arch'?: string; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'os_type'?: string; - /** - * - * @type {number} - * @memberof UpdateScaleSetParams - */ - 'runner_bootstrap_timeout'?: number; - /** - * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'runner_group'?: string; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'runner_prefix'?: string; - /** - * - * @type {string} - * @memberof UpdateScaleSetParams - */ - 'state'?: string; -} -/** - * Users holds information about a particular user - * @export - * @interface User - */ -export interface User { - /** - * - * @type {string} - * @memberof User - */ - 'created_at'?: string; - /** - * - * @type {string} - * @memberof User - */ - 'email'?: string; - /** - * - * @type {boolean} - * @memberof User - */ - 'enabled'?: boolean; - /** - * - * @type {string} - * @memberof User - */ - 'full_name'?: string; - /** - * - * @type {string} - * @memberof User - */ - 'id'?: string; - /** - * - * @type {boolean} - * @memberof User - */ - 'is_admin'?: boolean; - /** - * - * @type {string} - * @memberof User - */ - 'updated_at'?: string; - /** - * - * @type {string} - * @memberof User - */ - 'username'?: string; -} - -/** - * ControllerApi - axios parameter creator - * @export - */ -export const ControllerApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Update controller. - * @param {UpdateControllerParams} body Parameters used when updating the controller. 
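Every param creator in this file injects the Authorization header via setApiKeyToObject, so the bearer token is supplied through the Configuration object; a sketch (module path, URL and helper name are illustrative, not from this diff):

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path

// The generated operations read `configuration.apiKey` when they set the
// "Authorization" header, so the "Bearer " scheme prefix has to be part
// of the configured value.
export function makeConfig(token: string): Configuration {
  return new Configuration({
    basePath: 'https://garm.example.com/api/v1', // illustrative URL
    apiKey: `Bearer ${token}`,
  });
}
```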
- * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateController: async (body: UpdateControllerParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('updateController', 'body', body) - const localVarPath = `/controller`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * ControllerApi - functional programming interface - * @export - */ -export const ControllerApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = ControllerApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Update controller. - * @param {UpdateControllerParams} body Parameters used when updating the controller. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateController(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ControllerApi.updateController']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * ControllerApi - factory interface - * @export - */ -export const ControllerApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = ControllerApiFp(configuration) - return { - /** - * - * @summary Update controller. - * @param {UpdateControllerParams} body Parameters used when updating the controller. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateController(body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * ControllerApi - object-oriented interface - * @export - * @class ControllerApi - * @extends {BaseAPI} - */ -export class ControllerApi extends BaseAPI { - /** - * - * @summary Update controller. - * @param {UpdateControllerParams} body Parameters used when updating the controller. - * @param {*} [options] Override http request option. 
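The generator emits each operation four ways — param creator, functional wrapper (*Fp), factory, and the class that follows; application code typically uses only the class. A hedged sketch of the update call, assuming the hypothetical paths above:

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path
import { ControllerApi, UpdateControllerParams } from './generated/api';

// PUT /controller with a JSON body; only the fields present are meant to change.
export async function setWebhookUrl(config: Configuration, url: string): Promise<void> {
  const api = new ControllerApi(config);
  const body: UpdateControllerParams = { webhook_url: url };
  const res = await api.updateController(body);
  console.log('controller updated:', res.data);
}
```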
- * @throws {RequiredError} - * @memberof ControllerApi - */ - public updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig) { - return ControllerApiFp(this.configuration).updateController(body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * ControllerInfoApi - axios parameter creator - * @export - */ -export const ControllerInfoApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Get controller info. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - controllerInfo: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/controller-info`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * ControllerInfoApi - functional programming interface - * @export - */ -export const ControllerInfoApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = ControllerInfoApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Get controller info. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async controllerInfo(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.controllerInfo(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ControllerInfoApi.controllerInfo']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * ControllerInfoApi - factory interface - * @export - */ -export const ControllerInfoApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = ControllerInfoApiFp(configuration) - return { - /** - * - * @summary Get controller info. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - controllerInfo(options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.controllerInfo(options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * ControllerInfoApi - object-oriented interface - * @export - * @class ControllerInfoApi - * @extends {BaseAPI} - */ -export class ControllerInfoApi extends BaseAPI { - /** - * - * @summary Get controller info. - * @param {*} [options] Override http request option. 
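ControllerInfoApi follows the same layering; fetching controller metadata is a single authenticated GET. Sketch under the same assumptions:

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path
import { ControllerInfoApi } from './generated/api';

// GET /controller-info: a cheap authenticated call, e.g. for a startup check.
export async function logControllerInfo(config: Configuration): Promise<void> {
  const api = new ControllerInfoApi(config);
  const res = await api.controllerInfo();
  console.log(res.data);
}
```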
- * @throws {RequiredError} - * @memberof ControllerInfoApi - */ - public controllerInfo(options?: RawAxiosRequestConfig) { - return ControllerInfoApiFp(this.configuration).controllerInfo(options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * CredentialsApi - axios parameter creator - * @export - */ -export const CredentialsApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Create a GitHub credential. - * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createCredentials: async (body: CreateGithubCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('createCredentials', 'body', body) - const localVarPath = `/github/credentials`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Create a Gitea credential. - * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createGiteaCredentials: async (body: CreateGiteaCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('createGiteaCredentials', 'body', body) - const localVarPath = `/gitea/credentials`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('deleteCredentials', 'id', id) - const localVarPath = `/github/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGiteaCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('deleteGiteaCredentials', 'id', id) - const localVarPath = `/gitea/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. 
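Note how the `{id}` path template is substituted with an encodeURIComponent-escaped value before the request is built. Invoking the delete operation through the class interface might look like this (paths hypothetical):

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path
import { CredentialsApi } from './generated/api';

// DELETE /github/credentials/{id}; the generated code URL-encodes the id.
export async function removeGithubCredential(config: Configuration, id: number): Promise<void> {
  const api = new CredentialsApi(config);
  try {
    await api.deleteCredentials(id);
  } catch (err) {
    console.error(`failed to delete credential ${id}:`, err);
    throw err;
  }
}
```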
- * @throws {RequiredError} - */ - getCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('getCredentials', 'id', id) - const localVarPath = `/github/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGiteaCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('getGiteaCredentials', 'id', id) - const localVarPath = `/gitea/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listCredentials: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/github/credentials`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listGiteaCredentials: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/gitea/credentials`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateCredentials: async (id: number, body: UpdateGithubCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('updateCredentials', 'id', id) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateCredentials', 'body', body) - const localVarPath = `/github/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. - * @param {*} [options] Override http request option. 
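Update operations serialize the JSON body and set Content-Type: application/json; a sketch that renames a credential and, presumably, leaves omitted fields untouched:

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path
import { CredentialsApi, UpdateGithubCredentialsParams } from './generated/api';

// PUT /github/credentials/{id} with a partial JSON body.
export async function renameGithubCredential(
  config: Configuration,
  id: number,
  name: string,
): Promise<void> {
  const api = new CredentialsApi(config);
  const body: UpdateGithubCredentialsParams = { name };
  await api.updateCredentials(id, body);
}
```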
- * @throws {RequiredError} - */ - updateGiteaCredentials: async (id: number, body: UpdateGiteaCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('updateGiteaCredentials', 'id', id) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateGiteaCredentials', 'body', body) - const localVarPath = `/gitea/credentials/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * CredentialsApi - functional programming interface - * @export - */ -export const CredentialsApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = CredentialsApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create a GitHub credential. - * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createCredentials(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.createCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create a Gitea credential. - * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createGiteaCredentials(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.createGiteaCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteCredentials(id, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.deleteCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGiteaCredentials(id, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.deleteGiteaCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getCredentials(id, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.getCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getGiteaCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getGiteaCredentials(id, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.getGiteaCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listCredentials(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listCredentials(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.listCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listGiteaCredentials(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listGiteaCredentials(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.listGiteaCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateCredentials(id, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.updateCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateGiteaCredentials(id, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; - const localVarOperationServerBasePath = operationServerMap['CredentialsApi.updateGiteaCredentials']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * CredentialsApi - factory interface - * @export - */ -export const CredentialsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = CredentialsApiFp(configuration) - return { - /** - * - * @summary Create a GitHub credential. - * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createCredentials(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create a Gitea credential. - * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createGiteaCredentials(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteCredentials(id, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteGiteaCredentials(id, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getCredentials(id, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGiteaCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getGiteaCredentials(id, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listCredentials(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listCredentials(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all credentials. - * @param {*} [options] Override http request option. 
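The factory flavor accepts a caller-supplied AxiosInstance (third argument above), which is useful when the app maintains its own client; sketch:

```typescript
import axios from 'axios';
import { Configuration } from './generated/configuration'; // hypothetical path
import { CredentialsApiFactory } from './generated/api';

// A dedicated axios instance lets the app attach interceptors or retries
// without touching the module-level default client.
export function makeCredentialsClient(config: Configuration) {
  const http = axios.create({ timeout: 30_000 });
  return CredentialsApiFactory(config, undefined, http);
}
```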
- * @throws {RequiredError} - */ - listGiteaCredentials(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listGiteaCredentials(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateCredentials(id, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateGiteaCredentials(id, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * CredentialsApi - object-oriented interface - * @export - * @class CredentialsApi - * @extends {BaseAPI} - */ -export class CredentialsApi extends BaseAPI { - /** - * - * @summary Create a GitHub credential. - * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).createCredentials(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create a Gitea credential. - * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).createGiteaCredentials(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public deleteCredentials(id: number, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).deleteCredentials(id, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).deleteGiteaCredentials(id, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {*} [options] Override http request option. 
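GitHub and Gitea credentials are served by distinct endpoints, so a dashboard would likely fetch both lists concurrently; sketch under the same hypothetical imports:

```typescript
import { Configuration } from './generated/configuration'; // hypothetical path
import { CredentialsApi } from './generated/api';

// Fetch both credential lists in parallel.
export async function fetchAllCredentials(config: Configuration) {
  const api = new CredentialsApi(config);
  const [github, gitea] = await Promise.all([
    api.listCredentials(),      // GET /github/credentials
    api.listGiteaCredentials(), // GET /gitea/credentials
  ]);
  return { github: github.data, gitea: gitea.data };
}
```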
- * @throws {RequiredError} - * @memberof CredentialsApi - */ - public getCredentials(id: number, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).getCredentials(id, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public getGiteaCredentials(id: number, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).getGiteaCredentials(id, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all GitHub credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public listCredentials(options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).listCredentials(options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all Gitea credentials. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public listGiteaCredentials(options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).listGiteaCredentials(options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update a GitHub credential. - * @param {number} id ID of the GitHub credential. - * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).updateCredentials(id, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update a Gitea credential. - * @param {number} id ID of the Gitea credential. - * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof CredentialsApi - */ - public updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig) { - return CredentialsApiFp(this.configuration).updateGiteaCredentials(id, body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * EndpointsApi - axios parameter creator - * @export - */ -export const EndpointsApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Create a Gitea Endpoint. - * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createGiteaEndpoint: async (body: CreateGiteaEndpointParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => { - // verify required parameter 'body' is not null or undefined - assertParamExists('createGiteaEndpoint', 'body', body) - const localVarPath = `/gitea/endpoints`; - // use dummy base URL string because the URL constructor only accepts absolute URLs.
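
A minimal sketch of how the object-oriented CredentialsApi above is typically consumed; the base path and JWT are placeholders, not values from this diff:

    import { Configuration } from './configuration';
    import { CredentialsApi } from './api';

    // Placeholder values; substitute the real GARM base path and a valid JWT.
    const config = new Configuration({
        basePath: 'https://garm.example.com/api/v1',
        apiKey: 'Bearer <jwt>', // copied into the Authorization header by setApiKeyToObject
    });

    const credentialsApi = new CredentialsApi(config);

    async function dumpCredentials(): Promise<void> {
        // Each method resolves with a full axios response; the payload is on .data.
        const response = await credentialsApi.listCredentials();
        console.log(response.data);
    }
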
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Create a GitHub Endpoint. - * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createGithubEndpoint: async (body: CreateGithubEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('createGithubEndpoint', 'body', body) - const localVarPath = `/github/endpoints`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGiteaEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('deleteGiteaEndpoint', 'name', name) - const localVarPath = `/gitea/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
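
Each operation exists at several layers: the parameter creators here only assemble a URL plus axios request options, while the *Fp, factory and class layers bind that to an axios instance. A sketch of driving the lowest layer directly (base URL is a placeholder; authentication is omitted because no Configuration is passed):

    import globalAxios from 'axios';
    import { EndpointsApiAxiosParamCreator } from './api';

    async function rawDeleteGiteaEndpoint(name: string) {
        const creator = EndpointsApiAxiosParamCreator();
        // Builds { url, options } only; nothing has been sent yet.
        const { url, options } = await creator.deleteGiteaEndpoint(name);
        // Dispatch manually against a concrete server.
        return globalAxios.request({ ...options, url: `https://garm.example.com/api/v1${url}` });
    }
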
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGithubEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('deleteGithubEndpoint', 'name', name) - const localVarPath = `/github/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGiteaEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('getGiteaEndpoint', 'name', name) - const localVarPath = `/gitea/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
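
Every creator in this file authenticates the same way: setApiKeyToObject copies Configuration.apiKey into the Authorization header. Since apiKey may also be a (possibly async) function of the header name, token refresh can be wired in; a sketch where fetchToken is a hypothetical helper:

    import { Configuration } from './configuration';

    // Hypothetical helper that obtains a fresh GARM JWT (e.g. via a login call).
    async function fetchToken(): Promise<string> {
        return 'Bearer <fresh-jwt>';
    }

    const configuration = new Configuration({
        // Invoked with the header name ("Authorization") on every request.
        apiKey: async (name: string) => (name === 'Authorization' ? fetchToken() : ''),
    });
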
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGithubEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('getGithubEndpoint', 'name', name) - const localVarPath = `/github/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all Gitea Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listGiteaEndpoints: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/gitea/endpoints`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all GitHub Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listGithubEndpoints: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/github/endpoints`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
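
Callers that already maintain a tuned axios instance (interceptors, timeouts) can use the factory variant further down instead of the class; it closes over the instance rather than subclassing BaseAPI. A sketch with placeholder base path and token:

    import axios from 'axios';
    import { Configuration } from './configuration';
    import { EndpointsApiFactory } from './api';

    const http = axios.create({ timeout: 30_000 }); // illustrative settings

    const endpoints = EndpointsApiFactory(
        new Configuration({ apiKey: 'Bearer <jwt>' }),
        'https://garm.example.com/api/v1',
        http,
    );

    async function endpointInventory() {
        const [gitea, github] = await Promise.all([
            endpoints.listGiteaEndpoints(),
            endpoints.listGithubEndpoints(),
        ]);
        return { gitea: gitea.data, github: github.data };
    }
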
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateGiteaEndpoint: async (name: string, body: UpdateGiteaEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('updateGiteaEndpoint', 'name', name) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateGiteaEndpoint', 'body', body) - const localVarPath = `/gitea/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateGithubEndpoint: async (name: string, body: UpdateGithubEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'name' is not null or undefined - assertParamExists('updateGithubEndpoint', 'name', name) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateGithubEndpoint', 'body', body) - const localVarPath = `/github/endpoints/{name}` - .replace(`{${"name"}}`, encodeURIComponent(String(name))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
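
The recurring "dummy base URL" comment refers to a WHATWG URL quirk: new URL() rejects a bare relative path. The generated helpers work around it roughly as follows (stand-ins shown here; the real DUMMY_BASE_URL and toPathString live in the generated common module):

    // Stand-ins for the generated helpers.
    const DUMMY_BASE_URL = 'https://example.com';
    const toPathString = (url: URL): string => url.pathname + url.search + url.hash;

    const path = `/github/endpoints/{name}`
        .replace(`{${'name'}}`, encodeURIComponent('ghes.internal'));
    const urlObj = new URL(path, DUMMY_BASE_URL); // parses despite the relative path
    console.log(toPathString(urlObj));            // "/github/endpoints/ghes.internal"
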
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * EndpointsApi - functional programming interface - * @export - */ -export const EndpointsApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = EndpointsApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create a Gitea Endpoint. - * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createGiteaEndpoint(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.createGiteaEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create a GitHub Endpoint. - * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createGithubEndpoint(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.createGithubEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGiteaEndpoint(name, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.deleteGiteaEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGithubEndpoint(name, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.deleteGithubEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getGiteaEndpoint(name, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.getGiteaEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getGithubEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getGithubEndpoint(name, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.getGithubEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List all Gitea Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listGiteaEndpoints(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listGiteaEndpoints(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.listGiteaEndpoints']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List all GitHub Endpoints. 
- * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listGithubEndpoints(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listGithubEndpoints(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.listGithubEndpoints']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateGiteaEndpoint(name, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.updateGiteaEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateGithubEndpoint(name, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EndpointsApi.updateGithubEndpoint']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * EndpointsApi - factory interface - * @export - */ -export const EndpointsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = EndpointsApiFp(configuration) - return { - /** - * - * @summary Create a Gitea Endpoint. - * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createGiteaEndpoint(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create a GitHub Endpoint. - * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createGithubEndpoint(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteGiteaEndpoint(name, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteGithubEndpoint(name, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getGiteaEndpoint(name, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getGithubEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getGithubEndpoint(name, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all Gitea Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listGiteaEndpoints(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listGiteaEndpoints(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all GitHub Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listGithubEndpoints(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listGithubEndpoints(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateGiteaEndpoint(name, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateGithubEndpoint(name, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * EndpointsApi - object-oriented interface - * @export - * @class EndpointsApi - * @extends {BaseAPI} - */ -export class EndpointsApi extends BaseAPI { - /** - * - * @summary Create a Gitea Endpoint. - * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).createGiteaEndpoint(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create a GitHub Endpoint. - * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).createGithubEndpoint(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).deleteGiteaEndpoint(name, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).deleteGithubEndpoint(name, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).getGiteaEndpoint(name, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public getGithubEndpoint(name: string, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).getGithubEndpoint(name, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all Gitea Endpoints. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof EndpointsApi - */ - public listGiteaEndpoints(options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).listGiteaEndpoints(options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all GitHub Endpoints. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public listGithubEndpoints(options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).listGithubEndpoints(options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update a Gitea Endpoint. - * @param {string} name The name of the Gitea endpoint. - * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).updateGiteaEndpoint(name, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update a GitHub Endpoint. - * @param {string} name The name of the GitHub endpoint. - * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EndpointsApi - */ - public updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig) { - return EndpointsApiFp(this.configuration).updateGithubEndpoint(name, body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * EnterprisesApi - axios parameter creator - * @export - */ -export const EnterprisesApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Create enterprise with the given parameters. - * @param {CreateEnterpriseParams} body Parameters used to create the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterprise: async (body: CreateEnterpriseParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('createEnterprise', 'body', body) - const localVarPath = `/enterprises`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterprisePool: async (enterpriseID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('createEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'body' is not null or undefined - assertParamExists('createEnterprisePool', 'body', body) - const localVarPath = `/enterprises/{enterpriseID}/pools` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Create enterprise scale set with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterpriseScaleSet: async (enterpriseID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('createEnterpriseScaleSet', 'enterpriseID', enterpriseID) - // verify required parameter 'body' is not null or undefined - assertParamExists('createEnterpriseScaleSet', 'body', body) - const localVarPath = `/enterprises/{enterpriseID}/scalesets` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs.
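
A call to the pool-creation operation above looks roughly like this. The CreatePoolParams field names below follow GARM's JSON tags but are illustrative, not authoritative (hence the cast):

    import { Configuration } from './configuration';
    import { EnterprisesApi, CreatePoolParams } from './api';

    const api = new EnterprisesApi(new Configuration({
        basePath: 'https://garm.example.com/api/v1', // placeholder
        apiKey: 'Bearer <jwt>',
    }));

    async function addEnterprisePool(enterpriseID: string) {
        const body = {
            provider_name: 'lxd_local',   // assumed field names; check the generated model
            image: 'ubuntu:22.04',
            flavor: 'default',
            max_runners: 5,
            min_idle_runners: 1,
            tags: ['self-hosted', 'linux', 'x64'],
        } as unknown as CreatePoolParams;
        const { data: pool } = await api.createEnterprisePool(enterpriseID, body);
        return pool;
    }
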
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete enterprise by ID. - * @param {string} enterpriseID ID of the enterprise to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprise: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('deleteEnterprise', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('deleteEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deleteEnterprisePool', 'poolID', poolID) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
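
Failures from the delete operations above (and every other method in this file) surface as axios rejections, with the server's error body on error.response.data. A defensive pattern:

    import { isAxiosError } from 'axios';
    import { EnterprisesApi } from './api';

    async function deleteEnterpriseSafely(api: EnterprisesApi, id: string) {
        try {
            await api.deleteEnterprise(id);
        } catch (err) {
            if (isAxiosError(err) && err.response) {
                // e.g. a 404 for an unknown enterprise ID.
                console.error(err.response.status, err.response.data);
                return;
            }
            throw err; // network or programming errors: rethrow
        }
    }
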
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get enterprise by ID. - * @param {string} enterpriseID The ID of the enterprise to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprise: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('getEnterprise', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('getEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getEnterprisePool', 'poolID', poolID) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
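
The read operations compose naturally; a small sketch that pairs an enterprise with its runner instances (response models are elided in this diff, so the results are left untyped):

    import { EnterprisesApi } from './api';

    async function enterpriseOverview(api: EnterprisesApi, enterpriseID: string) {
        const { data: enterprise } = await api.getEnterprise(enterpriseID);
        const { data: instances } = await api.listEnterpriseInstances(enterpriseID);
        return { enterprise, instances };
    }
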
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List enterprise instances. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterpriseInstances: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('listEnterpriseInstances', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}/instances` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterprisePools: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('listEnterprisePools', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}/pools` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List enterprise scale sets. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterpriseScaleSets: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('listEnterpriseScaleSets', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}/scalesets` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all enterprises. - * @param {string} [name] Exact enterprise name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterprises: async (name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/enterprises`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - if (name !== undefined) { - localVarQueryParameter['name'] = name; - } - - if (endpoint !== undefined) { - localVarQueryParameter['endpoint'] = endpoint; - } - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update enterprise with the given parameters. - * @param {string} enterpriseID The ID of the enterprise to update. - * @param {UpdateEntityParams} body Parameters used when updating the enterprise. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - updateEnterprise: async (enterpriseID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('updateEnterprise', 'enterpriseID', enterpriseID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateEnterprise', 'body', body) - const localVarPath = `/enterprises/{enterpriseID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateEnterprisePool: async (enterpriseID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('updateEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateEnterprisePool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateEnterprisePool', 'body', body) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } }; - -/** - * EnterprisesApi - functional programming interface - * @export - */ -export const EnterprisesApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = EnterprisesApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create enterprise with the given parameters. - * @param {CreateEnterpriseParams} body Parameters used to create the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprise(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterprise']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprisePool(enterpriseID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create enterprise scale set with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterpriseScaleSet(enterpriseID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterpriseScaleSet']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete enterprise by ID. - * @param {string} enterpriseID ID of the enterprise to delete.
- * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprise(enterpriseID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.deleteEnterprise']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprisePool(enterpriseID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.deleteEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get enterprise by ID. - * @param {string} enterpriseID The ID of the enterprise to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprise(enterpriseID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.getEnterprise']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprisePool(enterpriseID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.getEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List enterprise instances. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. 
-     * @throws {RequiredError}
-     */
-        async listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseInstances(enterpriseID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterpriseInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List enterprise pools.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprisePools(enterpriseID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterprisePools']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List enterprise scale sets.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseScaleSets(enterpriseID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterpriseScaleSets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List all enterprises.
-         * @param {string} [name] Exact enterprise name to filter by
-         * @param {string} [endpoint] Exact endpoint name to filter by
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Enterprise>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprises(name, endpoint, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterprises']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary Update enterprise with the given parameters.
-         * @param {string} enterpriseID The ID of the enterprise to update.
-         * @param {UpdateEntityParams} body Parameters used when updating the enterprise.
-         * @param {*} [options] Override http request option.
- * @throws {RequiredError} - */ - async updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprise(enterpriseID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.updateEnterprise']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprisePool(enterpriseID, poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.updateEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * EnterprisesApi - factory interface - * @export - */ -export const EnterprisesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = EnterprisesApiFp(configuration) - return { - /** - * - * @summary Create enterprise with the given parameters. - * @param {CreateEnterpriseParams} body Parameters used to create the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createEnterprise(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createEnterprisePool(enterpriseID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. 
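As a usage note for the Fp layer above: it returns a request factory rather than performing the call itself, which is what lets the factory and class layers below inject their own axios instance and base path. A hedged sketch (the endpoint URL and token are placeholders, and an `Enterprise` model with a `name` field is assumed):

    async function listEnterprisesExample(): Promise<void> {
        // Building the request and executing it are two separate steps in the Fp layer.
        const fp = EnterprisesApiFp(new Configuration({ basePath: 'https://garm.example.com/api/v1', apiKey: 'Bearer <jwt>' }));
        const send = await fp.listEnterprises();   // no HTTP traffic yet
        const response = await send();             // executes with the default axios instance and base path
        response.data.forEach((e) => console.log(e.name));
    }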
- * @throws {RequiredError} - */ - createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete enterprise by ID. - * @param {string} enterpriseID ID of the enterprise to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteEnterprise(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get enterprise by ID. - * @param {string} enterpriseID The ID of the enterprise to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getEnterprise(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List enterprise instances. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterpriseInstances(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterprisePools(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List enterprise scale sets. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterpriseScaleSets(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all enterprises. - * @param {string} [name] Exact enterprise name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterprises(name, endpoint, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update enterprise with the given parameters. - * @param {string} enterpriseID The ID of the enterprise to update. - * @param {UpdateEntityParams} body Parameters used when updating the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateEnterprise(enterpriseID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * EnterprisesApi - object-oriented interface - * @export - * @class EnterprisesApi - * @extends {BaseAPI} - */ -export class EnterprisesApi extends BaseAPI { - /** - * - * @summary Create enterprise with the given parameters. - * @param {CreateEnterpriseParams} body Parameters used to create the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).createEnterprise(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).createEnterprisePool(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete enterprise by ID. - * @param {string} enterpriseID ID of the enterprise to delete. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).deleteEnterprise(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get enterprise by ID. - * @param {string} enterpriseID The ID of the enterprise to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).getEnterprise(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).getEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List enterprise instances. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).listEnterpriseInstances(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).listEnterprisePools(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List enterprise scale sets. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).listEnterpriseScaleSets(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all enterprises. - * @param {string} [name] Exact enterprise name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).listEnterprises(name, endpoint, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update enterprise with the given parameters. - * @param {string} enterpriseID The ID of the enterprise to update. - * @param {UpdateEntityParams} body Parameters used when updating the enterprise. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).updateEnterprise(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof EnterprisesApi - */ - public updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { - return EnterprisesApiFp(this.configuration).updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * FirstRunApi - axios parameter creator - * @export - */ -export const FirstRunApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Initialize the first run of the controller. - * @param {NewUserParams} body Create a new user. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - firstRun: async (body: NewUserParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'body' is not null or undefined - assertParamExists('firstRun', 'body', body) - const localVarPath = `/first-run`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {};
-        localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-        localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-        return {
-            url: toPathString(localVarUrlObj),
-            options: localVarRequestOptions,
-        };
-    },
-    }
-};
-
-/**
- * FirstRunApi - functional programming interface
- * @export
- */
-export const FirstRunApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = FirstRunApiAxiosParamCreator(configuration)
-    return {
-        /**
-         * 
-         * @summary Initialize the first run of the controller.
-         * @param {NewUserParams} body Create a new user.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async firstRun(body: NewUserParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<User>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.firstRun(body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['FirstRunApi.firstRun']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * FirstRunApi - factory interface
- * @export
- */
-export const FirstRunApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = FirstRunApiFp(configuration)
-    return {
-        /**
-         * 
-         * @summary Initialize the first run of the controller.
-         * @param {NewUserParams} body Create a new user.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        firstRun(body: NewUserParams, options?: RawAxiosRequestConfig): AxiosPromise<User> {
-            return localVarFp.firstRun(body, options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * FirstRunApi - object-oriented interface
- * @export
- * @class FirstRunApi
- * @extends {BaseAPI}
- */
-export class FirstRunApi extends BaseAPI {
-    /**
-     * 
-     * @summary Initialize the first run of the controller.
-     * @param {NewUserParams} body Create a new user.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof FirstRunApi
-     */
-    public firstRun(body: NewUserParams, options?: RawAxiosRequestConfig) {
-        return FirstRunApiFp(this.configuration).firstRun(body, options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
-/**
- * HooksApi - axios parameter creator
- * @export
- */
-export const HooksApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         * 
-         * @summary Get information about the GARM installed webhook on an organization.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getOrgWebhookInfo: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'orgID' is not null or undefined
-            assertParamExists('getOrgWebhookInfo', 'orgID', orgID)
-            const localVarPath = `/organizations/{orgID}/webhook`
-                .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
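For completeness, a sketch of the first-run flow the FirstRunApi above models; the NewUserParams fields shown are assumptions inferred from the parameter's description, and the endpoint is a placeholder:

    async function initializeControllerExample(): Promise<void> {
        // first-run happens before any credentials exist, so no apiKey is configured here.
        const api = new FirstRunApi(new Configuration({ basePath: 'https://garm.example.com/api/v1' }));
        const created = await api.firstRun({
            username: 'admin',             // assumed field names; check NewUserParams
            email: 'admin@example.com',
            password: 'a-strong-password',
        });
        console.log(created.data);
    }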
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get information about the GARM installed webhook on a repository. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getRepoWebhookInfo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('getRepoWebhookInfo', 'repoID', repoID) - const localVarPath = `/repositories/{repoID}/webhook` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installOrgWebhook: async (orgID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('installOrgWebhook', 'orgID', orgID) - // verify required parameter 'body' is not null or undefined - assertParamExists('installOrgWebhook', 'body', body) - const localVarPath = `/organizations/{orgID}/webhook` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} repoID Repository ID. - * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installRepoWebhook: async (repoID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('installRepoWebhook', 'repoID', repoID) - // verify required parameter 'body' is not null or undefined - assertParamExists('installRepoWebhook', 'body', body) - const localVarPath = `/repositories/{repoID}/webhook` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - uninstallOrgWebhook: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('uninstallOrgWebhook', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/webhook` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - uninstallRepoWebhook: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('uninstallRepoWebhook', 'repoID', repoID) - const localVarPath = `/repositories/{repoID}/webhook` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * HooksApi - functional programming interface - * @export - */ -export const HooksApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = HooksApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgWebhookInfo(orgID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.getOrgWebhookInfo']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get information about the GARM installed webhook on a repository. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. 
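Worth noting in the Fp methods above and below: each one consults operationServerMap with configuration.serverIndex, so a spec that declares per-operation servers can route individual calls to a different base URL. A sketch of that lookup (serverIndex is a standard option on the generated Configuration; the map is typically empty for this API, in which case basePath wins):

    const cfg = new Configuration({ basePath: 'https://garm.example.com/api/v1', serverIndex: 0 });
    const idx = cfg.serverIndex ?? 0;
    // Falls back to the configured basePath when no per-operation server is declared.
    const overrideBasePath = operationServerMap['HooksApi.getOrgWebhookInfo']?.[idx]?.url;
    console.log(overrideBasePath ?? cfg.basePath);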
- * @throws {RequiredError} - */ - async getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoWebhookInfo(repoID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.getRepoWebhookInfo']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.installOrgWebhook(orgID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.installOrgWebhook']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} repoID Repository ID. - * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.installRepoWebhook(repoID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.installRepoWebhook']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallOrgWebhook(orgID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.uninstallOrgWebhook']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Uninstall organization webhook. 
- * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallRepoWebhook(repoID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['HooksApi.uninstallRepoWebhook']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * HooksApi - factory interface - * @export - */ -export const HooksApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = HooksApiFp(configuration) - return { - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getOrgWebhookInfo(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get information about the GARM installed webhook on a repository. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getRepoWebhookInfo(repoID, options).then((request) => request(axios, basePath)); - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.installOrgWebhook(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} repoID Repository ID. - * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.installRepoWebhook(repoID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.uninstallOrgWebhook(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.uninstallRepoWebhook(repoID, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * HooksApi - object-oriented interface - * @export - * @class HooksApi - * @extends {BaseAPI} - */ -export class HooksApi extends BaseAPI { - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof HooksApi - */ - public getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).getOrgWebhookInfo(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get information about the GARM installed webhook on a repository. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof HooksApi - */ - public getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).getRepoWebhookInfo(repoID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof HooksApi - */ - public installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).installOrgWebhook(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} repoID Repository ID. - * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof HooksApi - */ - public installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).installRepoWebhook(repoID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof HooksApi - */ - public uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).uninstallOrgWebhook(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Uninstall organization webhook. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof HooksApi - */ - public uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig) { - return HooksApiFp(this.configuration).uninstallRepoWebhook(repoID, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * InstancesApi - axios parameter creator - * @export - */ -export const InstancesApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Delete runner instance by name. - * @param {string} instanceName Runner instance name. - * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. - * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteInstance: async (instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'instanceName' is not null or undefined - assertParamExists('deleteInstance', 'instanceName', instanceName) - const localVarPath = `/instances/{instanceName}` - .replace(`{${"instanceName"}}`, encodeURIComponent(String(instanceName))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - if (forceRemove !== undefined) { - localVarQueryParameter['forceRemove'] = forceRemove; - } - - if (bypassGHUnauthorized !== undefined) { - localVarQueryParameter['bypassGHUnauthorized'] = bypassGHUnauthorized; - } - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get runner instance by name. - * @param {string} instanceName Runner instance name. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getInstance: async (instanceName: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'instanceName' is not null or undefined - assertParamExists('getInstance', 'instanceName', instanceName) - const localVarPath = `/instances/{instanceName}` - .replace(`{${"instanceName"}}`, encodeURIComponent(String(instanceName))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
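A hedged end-to-end sketch of the webhook lifecycle the HooksApi class above exposes; the repository ID, token, and the InstallWebhookParams field are illustrative guesses rather than confirmed names:

    async function repoWebhookLifecycleExample(): Promise<void> {
        const hooks = new HooksApi(new Configuration({ basePath: 'https://garm.example.com/api/v1', apiKey: 'Bearer <jwt>' }));
        await hooks.installRepoWebhook('repo-uuid', { insecure_ssl: false }); // field name assumed
        const info = await hooks.getRepoWebhookInfo('repo-uuid');
        console.log(info.data);
        await hooks.uninstallRepoWebhook('repo-uuid');
    }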
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary List enterprise instances.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listEnterpriseInstances: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'enterpriseID' is not null or undefined
-            assertParamExists('listEnterpriseInstances', 'enterpriseID', enterpriseID)
-            const localVarPath = `/enterprises/{enterpriseID}/instances`
-                .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary Get all runners\' instances.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listInstances: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/instances`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary List organization instances.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listOrgInstances: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'orgID' is not null or undefined
-            assertParamExists('listOrgInstances', 'orgID', orgID)
-            const localVarPath = `/organizations/{orgID}/instances`
-                .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary List runner instances in a pool.
-         * @param {string} poolID Runner pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listPoolInstances: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'poolID' is not null or undefined
-            assertParamExists('listPoolInstances', 'poolID', poolID)
-            const localVarPath = `/pools/{poolID}/instances`
-                .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoInstances: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('listRepoInstances', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/instances`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary List runner instances in a scale set.
-         * @param {string} scalesetID Runner scale set ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listScaleSetInstances: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'scalesetID' is not null or undefined
-            assertParamExists('listScaleSetInstances', 'scalesetID', scalesetID)
-            const localVarPath = `/scalesets/{scalesetID}/instances`
-                .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * InstancesApi - functional programming interface
- * @export
- */
-export const InstancesApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = InstancesApiAxiosParamCreator(configuration)
-    return {
-        /**
-         * 
-         * @summary Delete runner instance by name.
-         * @param {string} instanceName Runner instance name.
-         * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database.
-         * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.deleteInstance']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary Get runner instance by name.
-         * @param {string} instanceName Runner instance name.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getInstance(instanceName: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Instance>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getInstance(instanceName, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.getInstance']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List enterprise instances.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseInstances(enterpriseID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listEnterpriseInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary Get all runners\' instances.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listInstances(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listInstances(options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List organization instances.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgInstances(orgID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listOrgInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List runner instances in a pool.
-         * @param {string} poolID Runner pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listPoolInstances(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listPoolInstances(poolID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listPoolInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoInstances(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listRepoInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * 
-         * @summary List runner instances in a scale set.
-         * @param {string} scalesetID Runner scale set ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listScaleSetInstances(scalesetID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['InstancesApi.listScaleSetInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * InstancesApi - factory interface
- * @export
- */
-export const InstancesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = InstancesApiFp(configuration)
-    return {
-        /**
-         * 
-         * @summary Delete runner instance by name.
-         * @param {string} instanceName Runner instance name.
-         * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database.
-         * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig): AxiosPromise<void> {
-            return localVarFp.deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary Get runner instance by name.
-         * @param {string} instanceName Runner instance name.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getInstance(instanceName: string, options?: RawAxiosRequestConfig): AxiosPromise<Instance> {
-            return localVarFp.getInstance(instanceName, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary List enterprise instances.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listEnterpriseInstances(enterpriseID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary Get all runners\' instances.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listInstances(options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listInstances(options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary List organization instances.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listOrgInstances(orgID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary List runner instances in a pool.
-         * @param {string} poolID Runner pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listPoolInstances(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listPoolInstances(poolID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listRepoInstances(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * 
-         * @summary List runner instances in a scale set.
-         * @param {string} scalesetID Runner scale set ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listScaleSetInstances(scalesetID, options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * InstancesApi - object-oriented interface
- * @export
- * @class InstancesApi
- * @extends {BaseAPI}
- */
-export class InstancesApi extends BaseAPI {
-    /**
-     * 
-     * @summary Delete runner instance by name.
-     * @param {string} instanceName Runner instance name.
-     * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database.
-     * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary Get runner instance by name.
-     * @param {string} instanceName Runner instance name.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public getInstance(instanceName: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).getInstance(instanceName, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary List enterprise instances.
-     * @param {string} enterpriseID Enterprise ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listEnterpriseInstances(enterpriseID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary Get all runners\' instances.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listInstances(options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listInstances(options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary List organization instances.
-     * @param {string} orgID Organization ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listOrgInstances(orgID: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listOrgInstances(orgID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary List runner instances in a pool.
-     * @param {string} poolID Runner pool ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listPoolInstances(poolID: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listPoolInstances(poolID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary List repository instances.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listRepoInstances(repoID: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listRepoInstances(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * 
-     * @summary List runner instances in a scale set.
-     * @param {string} scalesetID Runner scale set ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof InstancesApi
-     */
-    public listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig) {
-        return InstancesApiFp(this.configuration).listScaleSetInstances(scalesetID, options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
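Before its removal, application code would normally drive this generated client through the object-oriented wrapper above. A minimal sketch of that usage follows; the import path, base URL, and token handling are illustrative assumptions, not part of the generated file (the `Configuration` options `basePath` and `apiKey` are standard in typescript-axios clients, and `apiKey` feeds the `setApiKeyToObject(..., "Authorization", ...)` call seen in the param creators):

```typescript
import { Configuration, InstancesApi } from '$lib/api/generated';

// Assumed deployment details: point basePath/token at your GARM install.
const config = new Configuration({
    basePath: 'https://garm.example.com/api/v1',
    apiKey: `Bearer ${process.env.GARM_TOKEN}`, // becomes the Authorization header
});

const instances = new InstancesApi(config);

async function reapRunner(name: string): Promise<void> {
    // listInstances resolves to the full runner list; deleteInstance with
    // forceRemove=true tolerates provider-side errors during removal.
    const { data } = await instances.listInstances();
    console.log(`GARM currently tracks ${data.length} runner(s)`);
    await instances.deleteInstance(name, true);
}
```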
-/**
- * JobsApi - axios parameter creator
- * @export
- */
-export const JobsApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         * 
-         * @summary List all jobs.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listJobs: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/jobs`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * JobsApi - functional programming interface
- * @export
- */
-export const JobsApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = JobsApiAxiosParamCreator(configuration)
-    return {
-        /**
-         * 
-         * @summary List all jobs.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listJobs(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Job>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listJobs(options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['JobsApi.listJobs']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * JobsApi - factory interface
- * @export
- */
-export const JobsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = JobsApiFp(configuration)
-    return {
-        /**
-         * 
-         * @summary List all jobs.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listJobs(options?: RawAxiosRequestConfig): AxiosPromise<Array<Job>> {
-            return localVarFp.listJobs(options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * JobsApi - object-oriented interface
- * @export
- * @class JobsApi
- * @extends {BaseAPI}
- */
-export class JobsApi extends BaseAPI {
-    /**
-     * 
-     * @summary List all jobs.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof JobsApi
-     */
-    public listJobs(options?: RawAxiosRequestConfig) {
-        return JobsApiFp(this.configuration).listJobs(options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
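JobsApi exposes a single read-only call. A short sketch of consuming it; the `status` field and its `'queued'` value are assumptions about the generated Job model based on GARM's job-tracking semantics:

```typescript
import { Configuration, JobsApi } from '$lib/api/generated';

// Count workflow jobs still waiting for a runner to pick them up.
async function queuedJobCount(config: Configuration): Promise<number> {
    const jobs = new JobsApi(config);
    const { data } = await jobs.listJobs();
    // Assumption: Job exposes a `status` string such as 'queued'.
    return data.filter((job) => job.status === 'queued').length;
}
```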
-/**
- * LoginApi - axios parameter creator
- * @export
- */
-export const LoginApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         * 
-         * @summary Logs in a user and returns a JWT token.
-         * @param {PasswordLoginParams} body Login information.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        login: async (body: PasswordLoginParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('login', 'body', body)
-            const localVarPath = `/auth/login`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * LoginApi - functional programming interface
- * @export
- */
-export const LoginApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = LoginApiAxiosParamCreator(configuration)
-    return {
-        /**
-         * 
-         * @summary Logs in a user and returns a JWT token.
-         * @param {PasswordLoginParams} body Login information.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async login(body: PasswordLoginParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<JWTResponse>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.login(body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['LoginApi.login']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * LoginApi - factory interface
- * @export
- */
-export const LoginApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = LoginApiFp(configuration)
-    return {
-        /**
-         * 
-         * @summary Logs in a user and returns a JWT token.
-         * @param {PasswordLoginParams} body Login information.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        login(body: PasswordLoginParams, options?: RawAxiosRequestConfig): AxiosPromise<JWTResponse> {
-            return localVarFp.login(body, options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * LoginApi - object-oriented interface
- * @export
- * @class LoginApi
- * @extends {BaseAPI}
- */
-export class LoginApi extends BaseAPI {
-    /**
-     * 
-     * @summary Logs in a user and returns a JWT token.
-     * @param {PasswordLoginParams} body Login information.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof LoginApi
-     */
-    public login(body: PasswordLoginParams, options?: RawAxiosRequestConfig) {
-        return LoginApiFp(this.configuration).login(body, options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
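The login flow pairs naturally with the `Configuration` class: authenticate once, then build a second configuration that carries the returned JWT as the Bearer credential. A minimal sketch, under the assumption that `PasswordLoginParams` takes `username`/`password` and the response model exposes the JWT as `token`:

```typescript
import { Configuration, LoginApi } from '$lib/api/generated';

// Exchange credentials for a JWT, then reuse it as the Bearer API key.
// The `username`/`password` body fields and the `token` response field
// are assumptions about the generated models.
async function authenticate(basePath: string): Promise<Configuration> {
    const login = new LoginApi(new Configuration({ basePath }));
    const { data } = await login.login({
        username: 'admin',
        password: process.env.GARM_PASSWORD ?? '',
    });
    return new Configuration({ basePath, apiKey: `Bearer ${data.token}` });
}
```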
-/**
- * MetricsTokenApi - axios parameter creator
- * @export
- */
-export const MetricsTokenApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         * 
-         * @summary Returns a JWT token that can be used to access the metrics endpoint.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getMetricsToken: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/metrics-token`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * MetricsTokenApi - functional programming interface
- * @export
- */
-export const MetricsTokenApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = MetricsTokenApiAxiosParamCreator(configuration)
-    return {
-        /**
-         * 
-         * @summary Returns a JWT token that can be used to access the metrics endpoint.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getMetricsToken(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<JWTResponse>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getMetricsToken(options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['MetricsTokenApi.getMetricsToken']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * MetricsTokenApi - factory interface
- * @export
- */
-export const MetricsTokenApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = MetricsTokenApiFp(configuration)
-    return {
-        /**
-         * 
-         * @summary Returns a JWT token that can be used to access the metrics endpoint.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getMetricsToken(options?: RawAxiosRequestConfig): AxiosPromise<JWTResponse> {
-            return localVarFp.getMetricsToken(options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * MetricsTokenApi - object-oriented interface
- * @export
- * @class MetricsTokenApi
- * @extends {BaseAPI}
- */
-export class MetricsTokenApi extends BaseAPI {
-    /**
-     * 
-     * @summary Returns a JWT token that can be used to access the metrics endpoint.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof MetricsTokenApi
-     */
-    public getMetricsToken(options?: RawAxiosRequestConfig) {
-        return MetricsTokenApiFp(this.configuration).getMetricsToken(options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
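MetricsTokenApi follows the same shape. A sketch of minting a scrape credential for the metrics endpoint, so a Prometheus scraper does not need a full admin JWT; the `token` field on the response is again an assumption about the generated model:

```typescript
import { Configuration, MetricsTokenApi } from '$lib/api/generated';

// Fetch a token scoped to the metrics endpoint and format it as an
// Authorization header value for a scraper configuration.
async function metricsAuthHeader(config: Configuration): Promise<string> {
    const api = new MetricsTokenApi(config);
    const { data } = await api.getMetricsToken();
    return `Bearer ${data.token}`; // assumption: response exposes `token`
}
```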
-/**
- * OrganizationsApi - axios parameter creator
- * @export
- */
-export const OrganizationsApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         * 
-         * @summary Create organization with the parameters given.
-         * @param {CreateOrgParams} body Parameters used when creating the organization.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createOrg: async (body: CreateOrgParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createOrg', 'body', body)
-            const localVarPath = `/organizations`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * 
-         * @summary Create organization pool with the parameters given.
-         * @param {string} orgID Organization ID.
-         * @param {CreatePoolParams} body Parameters used when creating the organization pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createOrgPool: async (orgID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'orgID' is not null or undefined
-            assertParamExists('createOrgPool', 'orgID', orgID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createOrgPool', 'body', body)
-            const localVarPath = `/organizations/{orgID}/pools`
-                .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrgScaleSet: async (orgID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('createOrgScaleSet', 'orgID', orgID) - // verify required parameter 'body' is not null or undefined - assertParamExists('createOrgScaleSet', 'body', body) - const localVarPath = `/organizations/{orgID}/scalesets` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete organization by ID. - * @param {string} orgID ID of the organization to delete. - * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - deleteOrg: async (orgID: string, keepWebhook?: boolean, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('deleteOrg', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - if (keepWebhook !== undefined) { - localVarQueryParameter['keepWebhook'] = keepWebhook; - } - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('deleteOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deleteOrgPool', 'poolID', poolID) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get organization by ID. - * @param {string} orgID ID of the organization to fetch. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getOrg: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('getOrg', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('getOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getOrgPool', 'poolID', poolID) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgWebhookInfo: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('getOrgWebhookInfo', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/webhook` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installOrgWebhook: async (orgID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('installOrgWebhook', 'orgID', orgID) - // verify required parameter 'body' is not null or undefined - assertParamExists('installOrgWebhook', 'body', body) - const localVarPath = `/organizations/{orgID}/webhook` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List organization instances. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgInstances: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('listOrgInstances', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/instances` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgPools: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('listOrgPools', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/pools` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List organization scale sets. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgScaleSets: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('listOrgScaleSets', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/scalesets` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List organizations. - * @param {string} [name] Exact organization name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgs: async (name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/organizations`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - if (name !== undefined) { - localVarQueryParameter['name'] = name; - } - - if (endpoint !== undefined) { - localVarQueryParameter['endpoint'] = endpoint; - } - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - uninstallOrgWebhook: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('uninstallOrgWebhook', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/webhook` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update organization with the parameters given. - * @param {string} orgID ID of the organization to update. - * @param {UpdateEntityParams} body Parameters used when updating the organization. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - updateOrg: async (orgID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('updateOrg', 'orgID', orgID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateOrg', 'body', body) - const localVarPath = `/organizations/{orgID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateOrgPool: async (orgID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('updateOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateOrgPool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateOrgPool', 'body', body) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * OrganizationsApi - functional programming interface - * @export - */ -export const OrganizationsApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = OrganizationsApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create organization with the parameters given. - * @param {CreateOrgParams} body Parameters used when creating the organization. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createOrg(body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrg']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgPool(orgID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgScaleSet(orgID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrgScaleSet']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete organization by ID. - * @param {string} orgID ID of the organization to delete. - * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. 
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrg(orgID, keepWebhook, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.deleteOrg']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Delete organization pool by ID.
- * @param {string} orgID Organization ID.
- * @param {string} poolID ID of the organization pool to delete.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrgPool(orgID, poolID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.deleteOrgPool']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Get organization by ID.
- * @param {string} orgID ID of the organization to fetch.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async getOrg(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Organization>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.getOrg(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrg']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Get organization pool by ID.
- * @param {string} orgID Organization ID.
- * @param {string} poolID Pool ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgPool(orgID, poolID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrgPool']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Get information about the GARM installed webhook on an organization.
- * @param {string} orgID Organization ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<HookInfo>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgWebhookInfo(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrgWebhookInfo']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests.
- * @param {string} orgID Organization ID.
- * @param {InstallWebhookParams} body Parameters used when creating the organization webhook.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<HookInfo>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.installOrgWebhook(orgID, body, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.installOrgWebhook']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary List organization instances.
- * @param {string} orgID Organization ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgInstances(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgInstances']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary List organization pools.
- * @param {string} orgID Organization ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async listOrgPools(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgPools(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgPools']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary List organization scale sets.
- * @param {string} orgID Organization ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgScaleSets(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgScaleSets']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary List organizations.
- * @param {string} [name] Exact organization name to filter by
- * @param {string} [endpoint] Exact endpoint name to filter by
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Organization>>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgs(name, endpoint, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgs']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Uninstall organization webhook.
- * @param {string} orgID Organization ID.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallOrgWebhook(orgID, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.uninstallOrgWebhook']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Update organization with the parameters given.
- * @param {string} orgID ID of the organization to update.
- * @param {UpdateEntityParams} body Parameters used when updating the organization.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- async updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Organization>> {
- const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrg(orgID, body, options);
- const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
- const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.updateOrg']?.[localVarOperationServerIndex]?.url;
- return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
- },
- /**
- *
- * @summary Update organization pool with the parameters given.
- * @param {string} orgID Organization ID.
- * @param {string} poolID ID of the organization pool to update.
- * @param {UpdatePoolParams} body Parameters used when updating the organization pool.
- * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrgPool(orgID, poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.updateOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * OrganizationsApi - factory interface - * @export - */ -export const OrganizationsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = OrganizationsApiFp(configuration) - return { - /** - * - * @summary Create organization with the parameters given. - * @param {CreateOrgParams} body Parameters used when creating the organization. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createOrg(body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createOrgPool(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createOrgScaleSet(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete organization by ID. - * @param {string} orgID ID of the organization to delete. - * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteOrg(orgID, keepWebhook, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get organization by ID. - * @param {string} orgID ID of the organization to fetch. 
- * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrg(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getOrg(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getOrgWebhookInfo(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.installOrgWebhook(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organization instances. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgInstances(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgPools(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgPools(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organization scale sets. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgScaleSets(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organizations. - * @param {string} [name] Exact organization name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgs(name, endpoint, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.uninstallOrgWebhook(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update organization with the parameters given. - * @param {string} orgID ID of the organization to update. - * @param {UpdateEntityParams} body Parameters used when updating the organization. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateOrg(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateOrgPool(orgID, poolID, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * OrganizationsApi - object-oriented interface - * @export - * @class OrganizationsApi - * @extends {BaseAPI} - */ -export class OrganizationsApi extends BaseAPI { - /** - * - * @summary Create organization with the parameters given. - * @param {CreateOrgParams} body Parameters used when creating the organization. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).createOrg(body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).createOrgPool(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).createOrgScaleSet(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete organization by ID. - * @param {string} orgID ID of the organization to delete. - * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).deleteOrg(orgID, keepWebhook, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).deleteOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get organization by ID. - * @param {string} orgID ID of the organization to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public getOrg(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).getOrg(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).getOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get information about the GARM installed webhook on an organization. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).getOrgWebhookInfo(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. - * @param {string} orgID Organization ID. - * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).installOrgWebhook(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organization instances. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public listOrgInstances(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).listOrgInstances(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public listOrgPools(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).listOrgPools(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organization scale sets. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).listOrgScaleSets(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organizations. - * @param {string} [name] Exact organization name to filter by - * @param {string} [endpoint] Exact endpoint name to filter by - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).listOrgs(name, endpoint, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Uninstall organization webhook. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).uninstallOrgWebhook(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update organization with the parameters given. - * @param {string} orgID ID of the organization to update. - * @param {UpdateEntityParams} body Parameters used when updating the organization. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).updateOrg(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof OrganizationsApi - */ - public updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { - return OrganizationsApiFp(this.configuration).updateOrgPool(orgID, poolID, body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - -/** - * PoolsApi - axios parameter creator - * @export - */ -export const PoolsApiAxiosParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError}
- */
- createEnterprisePool: async (enterpriseID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
- // verify required parameter 'enterpriseID' is not null or undefined
- assertParamExists('createEnterprisePool', 'enterpriseID', enterpriseID)
- // verify required parameter 'body' is not null or undefined
- assertParamExists('createEnterprisePool', 'body', body)
- const localVarPath = `/enterprises/{enterpriseID}/pools`
- .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
- // use dummy base URL string because the URL constructor only accepts absolute URLs.
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
- let baseOptions;
- if (configuration) {
- baseOptions = configuration.baseOptions;
- }
-
- const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
- const localVarHeaderParameter = {} as any;
- const localVarQueryParameter = {} as any;
-
- // authentication Bearer required
- await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
- localVarHeaderParameter['Content-Type'] = 'application/json';
-
- setSearchParams(localVarUrlObj, localVarQueryParameter);
- let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
- localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
- localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
- return {
- url: toPathString(localVarUrlObj),
- options: localVarRequestOptions,
- };
- },
- /**
- *
- * @summary Create organization pool with the parameters given.
- * @param {string} orgID Organization ID.
- * @param {CreatePoolParams} body Parameters used when creating the organization pool.
- * @param {*} [options] Override http request option.
- * @throws {RequiredError}
- */
- createOrgPool: async (orgID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
- // verify required parameter 'orgID' is not null or undefined
- assertParamExists('createOrgPool', 'orgID', orgID)
- // verify required parameter 'body' is not null or undefined
- assertParamExists('createOrgPool', 'body', body)
- const localVarPath = `/organizations/{orgID}/pools`
- .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
- // use dummy base URL string because the URL constructor only accepts absolute URLs.
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
- let baseOptions;
- if (configuration) {
- baseOptions = configuration.baseOptions;
- }
-
- const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
- const localVarHeaderParameter = {} as any;
- const localVarQueryParameter = {} as any;
-
- // authentication Bearer required
- await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
- localVarHeaderParameter['Content-Type'] = 'application/json';
-
- setSearchParams(localVarUrlObj, localVarQueryParameter);
- let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
- localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
- localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
- return {
- url: toPathString(localVarUrlObj),
- options: localVarRequestOptions,
- };
- },
- /**
- *
- * @summary Create repository pool with the parameters given.
- * @param {string} repoID Repository ID. - * @param {CreatePoolParams} body Parameters used when creating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createRepoPool: async (repoID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('createRepoPool', 'repoID', repoID) - // verify required parameter 'body' is not null or undefined - assertParamExists('createRepoPool', 'body', body) - const localVarPath = `/repositories/{repoID}/pools` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('deleteEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deleteEnterprisePool', 'poolID', poolID) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('deleteOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deleteOrgPool', 'poolID', poolID) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete pool by ID. - * @param {string} poolID ID of the pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deletePool: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deletePool', 'poolID', poolID) - const localVarPath = `/pools/{poolID}` - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Delete repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to delete. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - deleteRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('deleteRepoPool', 'repoID', repoID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('deleteRepoPool', 'poolID', poolID) - const localVarPath = `/repositories/{repoID}/pools/{poolID}` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('getEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getEnterprisePool', 'poolID', poolID) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('getOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getOrgPool', 'poolID', poolID) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get pool by ID. - * @param {string} poolID ID of the pool to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getPool: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getPool', 'poolID', poolID) - const localVarPath = `/pools/{poolID}` - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Get repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('getRepoPool', 'repoID', repoID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('getRepoPool', 'poolID', poolID) - const localVarPath = `/repositories/{repoID}/pools/{poolID}` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterprisePools: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('listEnterprisePools', 'enterpriseID', enterpriseID) - const localVarPath = `/enterprises/{enterpriseID}/pools` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgPools: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('listOrgPools', 'orgID', orgID) - const localVarPath = `/organizations/{orgID}/pools` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List all pools. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listPools: async (options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/pools`; - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary List repository pools. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRepoPools: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('listRepoPools', 'repoID', repoID) - const localVarPath = `/repositories/{repoID}/pools` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. 
- * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateEnterprisePool: async (enterpriseID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'enterpriseID' is not null or undefined - assertParamExists('updateEnterprisePool', 'enterpriseID', enterpriseID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateEnterprisePool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateEnterprisePool', 'body', body) - const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` - .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateOrgPool: async (orgID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'orgID' is not null or undefined - assertParamExists('updateOrgPool', 'orgID', orgID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateOrgPool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateOrgPool', 'body', body) - const localVarPath = `/organizations/{orgID}/pools/{poolID}` - .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update pool by ID. - * @param {string} poolID ID of the pool to update. - * @param {UpdatePoolParams} body Parameters to update the pool with. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updatePool: async (poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updatePool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updatePool', 'body', body) - const localVarPath = `/pools/{poolID}` - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the repository pool. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - updateRepoPool: async (repoID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('updateRepoPool', 'repoID', repoID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateRepoPool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateRepoPool', 'body', body) - const localVarPath = `/repositories/{repoID}/pools/{poolID}` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * PoolsApi - functional programming interface - * @export - */ -export const PoolsApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = PoolsApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprisePool(enterpriseID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.createEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - async createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgPool(orgID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.createOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Create repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {CreatePoolParams} body Parameters used when creating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoPool(repoID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.createRepoPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprisePool(enterpriseID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrgPool(orgID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete pool by ID. - * @param {string} poolID ID of the pool to delete. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - async deletePool(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deletePool(poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.deletePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Delete repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepoPool(repoID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteRepoPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprisePool(enterpriseID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.getEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgPool(orgID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.getOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get pool by ID. - * @param {string} poolID ID of the pool to fetch. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - async getPool(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getPool(poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.getPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Get repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoPool(repoID, poolID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.getRepoPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprisePools(enterpriseID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.listEnterprisePools']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listOrgPools(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgPools(orgID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.listOrgPools']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List all pools. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listPools(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listPools(options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.listPools']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary List repository pools. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async listRepoPools(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoPools(repoID, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.listRepoPools']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprisePool(enterpriseID, poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateEnterprisePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrgPool(orgID, poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateOrgPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update pool by ID. - * @param {string} poolID ID of the pool to update. - * @param {UpdatePoolParams} body Parameters to update the pool with. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - async updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updatePool(poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.updatePool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepoPool(repoID, poolID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateRepoPool']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * PoolsApi - factory interface - * @export - */ -export const PoolsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = PoolsApiFp(configuration) - return { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createEnterprisePool(enterpriseID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createOrgPool(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {CreatePoolParams} body Parameters used when creating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createRepoPool(repoID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. 
- * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete pool by ID. - * @param {string} poolID ID of the pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deletePool(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deletePool(poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get pool by ID. - * @param {string} poolID ID of the pool to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getPool(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getPool(poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterprisePools(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgPools(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgPools(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all pools. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listPools(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listPools(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List repository pools. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRepoPools(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listRepoPools(repoID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateOrgPool(orgID, poolID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update pool by ID. - * @param {string} poolID ID of the pool to update. - * @param {UpdatePoolParams} body Parameters to update the pool with. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updatePool(poolID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the repository pool. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateRepoPool(repoID, poolID, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * PoolsApi - object-oriented interface - * @export - * @class PoolsApi - * @extends {BaseAPI} - */ -export class PoolsApi extends BaseAPI { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).createEnterprisePool(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create organization pool with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreatePoolParams} body Parameters used when creating the organization pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).createOrgPool(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {CreatePoolParams} body Parameters used when creating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).createRepoPool(repoID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID ID of the enterprise pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID ID of the organization pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).deleteOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete pool by ID. - * @param {string} poolID ID of the pool to delete. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof PoolsApi - */ - public deletePool(poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).deletePool(poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).deleteRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get enterprise pool by ID. - * @param {string} enterpriseID Enterprise ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).getEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get organization pool by ID. - * @param {string} orgID Organization ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).getOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get pool by ID. - * @param {string} poolID ID of the pool to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public getPool(poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).getPool(poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get repository pool by ID. - * @param {string} repoID Repository ID. - * @param {string} poolID Pool ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).getRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List enterprise pools. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).listEnterprisePools(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organization pools. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof PoolsApi - */ - public listOrgPools(orgID: string, options?: RawAxiosRequestConfig) { - return PoolsApiFp(this.configuration).listOrgPools(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all pools. - * @param {*} [options] Override http request option. 
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public listPools(options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).listPools(options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary List repository pools.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public listRepoPools(repoID: string, options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).listRepoPools(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update enterprise pool with the parameters given.
-     * @param {string} enterpriseID Enterprise ID.
-     * @param {string} poolID ID of the enterprise pool to update.
-     * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update organization pool with the parameters given.
-     * @param {string} orgID Organization ID.
-     * @param {string} poolID ID of the organization pool to update.
-     * @param {UpdatePoolParams} body Parameters used when updating the organization pool.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).updateOrgPool(orgID, poolID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update pool by ID.
-     * @param {string} poolID ID of the pool to update.
-     * @param {UpdatePoolParams} body Parameters to update the pool with.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).updatePool(poolID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update repository pool with the parameters given.
-     * @param {string} repoID Repository ID.
-     * @param {string} poolID ID of the repository pool to update.
-     * @param {UpdatePoolParams} body Parameters used when updating the repository pool.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof PoolsApi
-     */
-    public updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) {
-        return PoolsApiFp(this.configuration).updateRepoPool(repoID, poolID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
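For reviewers who never used the generated client this hunk deletes: all three layers (param creator, `Fp`, factory, class) funnel into the same request builders above. A minimal sketch of the object-oriented `PoolsApi` surface; the import path, server URL, token, and the `max_runners` field are illustrative assumptions, not taken from this diff:

```typescript
// Sketch only: import path, URL, token and field names below are assumed.
import { Configuration, PoolsApi } from './generated';

async function bumpFirstPool() {
    const config = new Configuration({
        basePath: 'https://garm.example.com/api/v1', // assumed GARM endpoint
        apiKey: 'Bearer <JWT>', // consumed by setApiKeyToObject as the Authorization header
    });
    const pools = new PoolsApi(config);

    // listPools resolves to an AxiosResponse, so the payload lives on `data`.
    const { data } = await pools.listPools();
    if (data.length > 0 && data[0].id) {
        // UpdatePoolParams is a partial update; only the provided fields change.
        await pools.updatePool(data[0].id, { max_runners: 10 });
    }
}
```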
-/**
- * ProvidersApi - axios parameter creator
- * @export
- */
-export const ProvidersApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         *
-         * @summary List all providers.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listProviders: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/providers`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * ProvidersApi - functional programming interface
- * @export
- */
-export const ProvidersApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = ProvidersApiAxiosParamCreator(configuration)
-    return {
-        /**
-         *
-         * @summary List all providers.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listProviders(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Provider>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listProviders(options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ProvidersApi.listProviders']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
-
-/**
- * ProvidersApi - factory interface
- * @export
- */
-export const ProvidersApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = ProvidersApiFp(configuration)
-    return {
-        /**
-         *
-         * @summary List all providers.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listProviders(options?: RawAxiosRequestConfig): AxiosPromise<Array<Provider>> {
-            return localVarFp.listProviders(options).then((request) => request(axios, basePath));
-        },
-    };
-};
-
-/**
- * ProvidersApi - object-oriented interface
- * @export
- * @class ProvidersApi
- * @extends {BaseAPI}
- */
-export class ProvidersApi extends BaseAPI {
-    /**
-     *
-     * @summary List all providers.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof ProvidersApi
-     */
-    public listProviders(options?: RawAxiosRequestConfig) {
-        return ProvidersApiFp(this.configuration).listProviders(options).then((request) => request(this.axios, this.basePath));
-    }
-}
-
-
-
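A corresponding sketch for the factory flavor, which accepts a preconfigured axios instance; the interceptor, base path, token, and the `name` field on the Provider model are again placeholders:

```typescript
// Sketch only: demonstrates ProvidersApiFactory with a custom axios instance.
import axios from 'axios';
import { Configuration, ProvidersApiFactory } from './generated';

const http = axios.create();
http.interceptors.response.use((resp) => {
    // Simple response logging; any interceptor would do here.
    console.log(`${resp.config.method?.toUpperCase()} ${resp.config.url} -> ${resp.status}`);
    return resp;
});

const providers = ProvidersApiFactory(
    new Configuration({ apiKey: 'Bearer <JWT>' }), // placeholder credentials
    'https://garm.example.com/api/v1',             // assumed base path
    http,
);

async function showProviders() {
    const { data } = await providers.listProviders();
    // The Provider model is assumed to expose a `name` field.
    console.log(data.map((p) => p.name));
}
```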
-/**
- * RepositoriesApi - axios parameter creator
- * @export
- */
-export const RepositoriesApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         *
-         * @summary Create repository with the parameters given.
-         * @param {CreateRepoParams} body Parameters used when creating the repository.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepo: async (body: CreateRepoParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createRepo', 'body', body)
-            const localVarPath = `/repositories`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Create repository pool with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreatePoolParams} body Parameters used when creating the repository pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepoPool: async (repoID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('createRepoPool', 'repoID', repoID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createRepoPool', 'body', body)
-            const localVarPath = `/repositories/{repoID}/pools`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Create repository scale set with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepoScaleSet: async (repoID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('createRepoScaleSet', 'repoID', repoID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createRepoScaleSet', 'body', body)
-            const localVarPath = `/repositories/{repoID}/scalesets`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Delete repository by ID.
-         * @param {string} repoID ID of the repository to delete.
-         * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteRepo: async (repoID: string, keepWebhook?: boolean, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('deleteRepo', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-            if (keepWebhook !== undefined) {
-                localVarQueryParameter['keepWebhook'] = keepWebhook;
-            }
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Delete repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID ID of the repository pool to delete.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('deleteRepoPool', 'repoID', repoID)
-            // verify required parameter 'poolID' is not null or undefined
-            assertParamExists('deleteRepoPool', 'poolID', poolID)
-            const localVarPath = `/repositories/{repoID}/pools/{poolID}`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)))
-                .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Get repository by ID.
-         * @param {string} repoID ID of the repository to fetch.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('getRepo', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Get repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID Pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('getRepoPool', 'repoID', repoID)
-            // verify required parameter 'poolID' is not null or undefined
-            assertParamExists('getRepoPool', 'poolID', poolID)
-            const localVarPath = `/repositories/{repoID}/pools/{poolID}`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)))
-                .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Get information about the GARM installed webhook on a repository.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepoWebhookInfo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('getRepoWebhookInfo', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/webhook`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         * Install the GARM webhook for a repository. The secret configured on the repository will be used to validate the requests.
-         * @param {string} repoID Repository ID.
-         * @param {InstallWebhookParams} body Parameters used when creating the repository webhook.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        installRepoWebhook: async (repoID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('installRepoWebhook', 'repoID', repoID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('installRepoWebhook', 'body', body)
-            const localVarPath = `/repositories/{repoID}/webhook`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoInstances: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('listRepoInstances', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/instances`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List repository pools.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoPools: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('listRepoPools', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/pools`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List repository scale sets.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoScaleSets: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('listRepoScaleSets', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/scalesets`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List repositories.
-         * @param {string} [owner] Exact owner name to filter by
-         * @param {string} [name] Exact repository name to filter by
-         * @param {string} [endpoint] Exact endpoint name to filter by
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepos: async (owner?: string, name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/repositories`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - if (owner !== undefined) { - localVarQueryParameter['owner'] = owner; - } - - if (name !== undefined) { - localVarQueryParameter['name'] = name; - } - - if (endpoint !== undefined) { - localVarQueryParameter['endpoint'] = endpoint; - } - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Uninstall organization webhook. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - uninstallRepoWebhook: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('uninstallRepoWebhook', 'repoID', repoID) - const localVarPath = `/repositories/{repoID}/webhook` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update repository with the parameters given. - * @param {string} repoID ID of the repository to update. - * @param {UpdateEntityParams} body Parameters used when updating the repository. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateRepo: async (repoID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('updateRepo', 'repoID', repoID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateRepo', 'body', body) - const localVarPath = `/repositories/{repoID}` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. 
- const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary Update repository pool with the parameters given. - * @param {string} repoID Repository ID. - * @param {string} poolID ID of the repository pool to update. - * @param {UpdatePoolParams} body Parameters used when updating the repository pool. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateRepoPool: async (repoID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'repoID' is not null or undefined - assertParamExists('updateRepoPool', 'repoID', repoID) - // verify required parameter 'poolID' is not null or undefined - assertParamExists('updateRepoPool', 'poolID', poolID) - // verify required parameter 'body' is not null or undefined - assertParamExists('updateRepoPool', 'body', body) - const localVarPath = `/repositories/{repoID}/pools/{poolID}` - .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) - .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); - // use dummy base URL string because the URL constructor only accepts absolute URLs. - const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); - let baseOptions; - if (configuration) { - baseOptions = configuration.baseOptions; - } - - const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) - - - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - setSearchParams(localVarUrlObj, localVarQueryParameter); - let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; - localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) - - return { - url: toPathString(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } -}; - -/** - * RepositoriesApi - functional programming interface - * @export - */ -export const RepositoriesApiFp = function(configuration?: Configuration) { - const localVarAxiosParamCreator = RepositoriesApiAxiosParamCreator(configuration) - return { - /** - * - * @summary Create repository with the parameters given. - * @param {CreateRepoParams} body Parameters used when creating the repository. - * @param {*} [options] Override http request option. 
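The param-creator tier deleted above never dispatches a request; it only assembles a `RequestArgs` pair (relative URL plus axios options). A minimal sketch of consuming it directly, assuming the generated module layout (`./api`, `./base`, `./configuration`) and a placeholder GARM endpoint:

```typescript
import globalAxios from 'axios';
import { Configuration } from './configuration';
import { RequestArgs } from './base';
import { RepositoriesApiAxiosParamCreator } from './api';

// Build the request description for GET /repositories/{repoID}/pools/{poolID}
// and dispatch it manually. The base URL and token are assumed example values.
async function fetchPoolRaw(repoID: string, poolID: string) {
    const config = new Configuration({ accessToken: 'garm-jwt-token' });
    const creator = RepositoriesApiAxiosParamCreator(config);
    const args: RequestArgs = await creator.getRepoPool(repoID, poolID);
    // args.url is relative; the caller supplies the server base path.
    return globalAxios.request({
        ...args.options,
        url: 'https://garm.example.com/api/v1' + args.url,
    });
}
```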
-
-/**
- * RepositoriesApi - functional programming interface
- * @export
- */
-export const RepositoriesApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = RepositoriesApiAxiosParamCreator(configuration)
-    return {
-        /**
-         *
-         * @summary Create repository with the parameters given.
-         * @param {CreateRepoParams} body Parameters used when creating the repository.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Repository>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createRepo(body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepo']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Create repository pool with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreatePoolParams} body Parameters used when creating the repository pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoPool(repoID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepoPool']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Create repository scale set with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoScaleSet(repoID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepoScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Delete repository by ID.
-         * @param {string} repoID ID of the repository to delete.
-         * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepo(repoID, keepWebhook, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.deleteRepo']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Delete repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID ID of the repository pool to delete.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepoPool(repoID, poolID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.deleteRepoPool']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Get repository by ID.
-         * @param {string} repoID ID of the repository to fetch.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getRepo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Repository>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getRepo(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepo']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Get repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID Pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoPool(repoID, poolID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepoPool']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Get information about the GARM installed webhook on a repository.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<HookInfo>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoWebhookInfo(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepoWebhookInfo']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests.
-         * @param {string} repoID Repository ID.
-         * @param {InstallWebhookParams} body Parameters used when creating the repository webhook.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<HookInfo>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.installRepoWebhook(repoID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.installRepoWebhook']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Instance>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoInstances(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoInstances']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List repository pools.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepoPools(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoPools(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoPools']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List repository scale sets.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoScaleSets(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoScaleSets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List repositories.
-         * @param {string} [owner] Exact owner name to filter by
-         * @param {string} [name] Exact repository name to filter by
-         * @param {string} [endpoint] Exact endpoint name to filter by
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Repository>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepos(owner, name, endpoint, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepos']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Uninstall organization webhook.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallRepoWebhook(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.uninstallRepoWebhook']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Update repository with the parameters given.
-         * @param {string} repoID ID of the repository to update.
-         * @param {UpdateEntityParams} body Parameters used when updating the repository.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Repository>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepo(repoID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.updateRepo']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Update repository pool with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID ID of the repository pool to update.
-         * @param {UpdatePoolParams} body Parameters used when updating the repository pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepoPool(repoID, poolID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.updateRepoPool']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
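Each method in the functional tier resolves per-operation server overrides from `operationServerMap` and returns a curried request function; only `axios` and `basePath` remain to be supplied. A short sketch under the same module-layout assumptions as above:

```typescript
import globalAxios from 'axios';
import { Configuration } from './configuration';
import { RepositoriesApiFp } from './api';

// List repositories via the functional interface. The token is a placeholder.
async function listAllRepos() {
    const fp = RepositoriesApiFp(new Configuration({ accessToken: 'garm-jwt-token' }));
    const send = await fp.listRepos();          // builds the bound request function
    const response = await send(globalAxios);   // dispatches with the default base path
    return response.data;                       // array of repositories
}
```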
-
-/**
- * RepositoriesApi - factory interface
- * @export
- */
-export const RepositoriesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
-    const localVarFp = RepositoriesApiFp(configuration)
-    return {
-        /**
-         *
-         * @summary Create repository with the parameters given.
-         * @param {CreateRepoParams} body Parameters used when creating the repository.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig): AxiosPromise<Repository> {
-            return localVarFp.createRepo(body, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Create repository pool with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreatePoolParams} body Parameters used when creating the repository pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise<Pool> {
-            return localVarFp.createRepoPool(repoID, body, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Create repository scale set with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise<ScaleSet> {
-            return localVarFp.createRepoScaleSet(repoID, body, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Delete repository by ID.
-         * @param {string} repoID ID of the repository to delete.
-         * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): AxiosPromise<void> {
-            return localVarFp.deleteRepo(repoID, keepWebhook, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Delete repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID ID of the repository pool to delete.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise<void> {
-            return localVarFp.deleteRepoPool(repoID, poolID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Get repository by ID.
-         * @param {string} repoID ID of the repository to fetch.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<Repository> {
-            return localVarFp.getRepo(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Get repository pool by ID.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID Pool ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise<Pool> {
-            return localVarFp.getRepoPool(repoID, poolID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Get information about the GARM installed webhook on a repository.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<HookInfo> {
-            return localVarFp.getRepoWebhookInfo(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests.
-         * @param {string} repoID Repository ID.
-         * @param {InstallWebhookParams} body Parameters used when creating the repository webhook.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise<HookInfo> {
-            return localVarFp.installRepoWebhook(repoID, body, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary List repository instances.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Instance>> {
-            return localVarFp.listRepoInstances(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary List repository pools.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoPools(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Pool>> {
-            return localVarFp.listRepoPools(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary List repository scale sets.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<ScaleSet>> {
-            return localVarFp.listRepoScaleSets(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary List repositories.
-         * @param {string} [owner] Exact owner name to filter by
-         * @param {string} [name] Exact repository name to filter by
-         * @param {string} [endpoint] Exact endpoint name to filter by
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise<Array<Repository>> {
-            return localVarFp.listRepos(owner, name, endpoint, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Uninstall organization webhook.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise<void> {
-            return localVarFp.uninstallRepoWebhook(repoID, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Update repository with the parameters given.
-         * @param {string} repoID ID of the repository to update.
-         * @param {UpdateEntityParams} body Parameters used when updating the repository.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise<Repository> {
-            return localVarFp.updateRepo(repoID, body, options).then((request) => request(axios, basePath));
-        },
-        /**
-         *
-         * @summary Update repository pool with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {string} poolID ID of the repository pool to update.
-         * @param {UpdatePoolParams} body Parameters used when updating the repository pool.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise<Pool> {
-            return localVarFp.updateRepoPool(repoID, poolID, body, options).then((request) => request(axios, basePath));
-        },
-    };
-};
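The factory tier pins configuration, base path, and the axios instance once and exposes plain promise-returning methods. A rough sketch; the URL and token are placeholder values:

```typescript
import globalAxios from 'axios';
import { Configuration } from './configuration';
import { RepositoriesApiFactory } from './api';

// Bind everything up front; subsequent calls need only the operation arguments.
const repos = RepositoriesApiFactory(
    new Configuration({ accessToken: 'garm-jwt-token' }),
    'https://garm.example.com/api/v1',
    globalAxios,
);

async function poolCount(repoID: string): Promise<number> {
    const { data } = await repos.listRepoPools(repoID);
    return data.length;
}
```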
-
-/**
- * RepositoriesApi - object-oriented interface
- * @export
- * @class RepositoriesApi
- * @extends {BaseAPI}
- */
-export class RepositoriesApi extends BaseAPI {
-    /**
-     *
-     * @summary Create repository with the parameters given.
-     * @param {CreateRepoParams} body Parameters used when creating the repository.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).createRepo(body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Create repository pool with the parameters given.
-     * @param {string} repoID Repository ID.
-     * @param {CreatePoolParams} body Parameters used when creating the repository pool.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).createRepoPool(repoID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Create repository scale set with the parameters given.
-     * @param {string} repoID Repository ID.
-     * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).createRepoScaleSet(repoID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Delete repository by ID.
-     * @param {string} repoID ID of the repository to delete.
-     * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).deleteRepo(repoID, keepWebhook, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Delete repository pool by ID.
-     * @param {string} repoID Repository ID.
-     * @param {string} poolID ID of the repository pool to delete.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).deleteRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Get repository by ID.
-     * @param {string} repoID ID of the repository to fetch.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public getRepo(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).getRepo(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Get repository pool by ID.
-     * @param {string} repoID Repository ID.
-     * @param {string} poolID Pool ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).getRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Get information about the GARM installed webhook on a repository.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).getRepoWebhookInfo(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests.
-     * @param {string} repoID Repository ID.
-     * @param {InstallWebhookParams} body Parameters used when creating the repository webhook.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).installRepoWebhook(repoID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary List repository instances.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public listRepoInstances(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).listRepoInstances(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary List repository pools.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public listRepoPools(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).listRepoPools(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary List repository scale sets.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).listRepoScaleSets(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary List repositories.
-     * @param {string} [owner] Exact owner name to filter by
-     * @param {string} [name] Exact repository name to filter by
-     * @param {string} [endpoint] Exact endpoint name to filter by
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).listRepos(owner, name, endpoint, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Uninstall organization webhook.
-     * @param {string} repoID Repository ID.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).uninstallRepoWebhook(repoID, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update repository with the parameters given.
-     * @param {string} repoID ID of the repository to update.
-     * @param {UpdateEntityParams} body Parameters used when updating the repository.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).updateRepo(repoID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-
-    /**
-     *
-     * @summary Update repository pool with the parameters given.
-     * @param {string} repoID Repository ID.
-     * @param {string} poolID ID of the repository pool to update.
-     * @param {UpdatePoolParams} body Parameters used when updating the repository pool.
-     * @param {*} [options] Override http request option.
-     * @throws {RequiredError}
-     * @memberof RepositoriesApi
-     */
-    public updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) {
-        return RepositoriesApiFp(this.configuration).updateRepoPool(repoID, poolID, body, options).then((request) => request(this.axios, this.basePath));
-    }
-}
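The class-based tier is the usual entry point for application code. A sketch of typical use; the base path and token are placeholders, and each call resolves to an AxiosResponse whose payload sits in `.data`:

```typescript
import { Configuration } from './configuration';
import { RepositoriesApi } from './api';

const api = new RepositoriesApi(new Configuration({
    basePath: 'https://garm.example.com/api/v1',
    accessToken: 'garm-jwt-token',
}));

// Fetch a repository together with its pools and runner instances.
async function repoSummary(repoID: string) {
    const [repo, pools, instances] = await Promise.all([
        api.getRepo(repoID),
        api.listRepoPools(repoID),
        api.listRepoInstances(repoID),
    ]);
    return { repo: repo.data, pools: pools.data.length, runners: instances.data.length };
}
```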
-
-
-
-/**
- * ScalesetsApi - axios parameter creator
- * @export
- */
-export const ScalesetsApiAxiosParamCreator = function (configuration?: Configuration) {
-    return {
-        /**
-         *
-         * @summary Create enterprise pool with the parameters given.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createEnterpriseScaleSet: async (enterpriseID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'enterpriseID' is not null or undefined
-            assertParamExists('createEnterpriseScaleSet', 'enterpriseID', enterpriseID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createEnterpriseScaleSet', 'body', body)
-            const localVarPath = `/enterprises/{enterpriseID}/scalesets`
-                .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Create organization scale set with the parameters given.
-         * @param {string} orgID Organization ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createOrgScaleSet: async (orgID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'orgID' is not null or undefined
-            assertParamExists('createOrgScaleSet', 'orgID', orgID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createOrgScaleSet', 'body', body)
-            const localVarPath = `/organizations/{orgID}/scalesets`
-                .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Create repository scale set with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        createRepoScaleSet: async (repoID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('createRepoScaleSet', 'repoID', repoID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('createRepoScaleSet', 'body', body)
-            const localVarPath = `/repositories/{repoID}/scalesets`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Delete scale set by ID.
-         * @param {string} scalesetID ID of the scale set to delete.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        deleteScaleSet: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'scalesetID' is not null or undefined
-            assertParamExists('deleteScaleSet', 'scalesetID', scalesetID)
-            const localVarPath = `/scalesets/{scalesetID}`
-                .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Get scale set by ID.
-         * @param {string} scalesetID ID of the scale set to fetch.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        getScaleSet: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'scalesetID' is not null or undefined
-            assertParamExists('getScaleSet', 'scalesetID', scalesetID)
-            const localVarPath = `/scalesets/{scalesetID}`
-                .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List enterprise scale sets.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listEnterpriseScaleSets: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'enterpriseID' is not null or undefined
-            assertParamExists('listEnterpriseScaleSets', 'enterpriseID', enterpriseID)
-            const localVarPath = `/enterprises/{enterpriseID}/scalesets`
-                .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List organization scale sets.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listOrgScaleSets: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'orgID' is not null or undefined
-            assertParamExists('listOrgScaleSets', 'orgID', orgID)
-            const localVarPath = `/organizations/{orgID}/scalesets`
-                .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List repository scale sets.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listRepoScaleSets: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'repoID' is not null or undefined
-            assertParamExists('listRepoScaleSets', 'repoID', repoID)
-            const localVarPath = `/repositories/{repoID}/scalesets`
-                .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary List all scalesets.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        listScalesets: async (options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            const localVarPath = `/scalesets`;
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
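Note the routing asymmetry in this param creator: scale sets are created and listed under their owning entity (`/repositories/{repoID}/scalesets` and friends) but fetched, updated, and deleted via flat `/scalesets/{scalesetID}` routes. A small sketch, under the same module-layout assumptions as the earlier examples, inspecting the request description without sending it:

```typescript
import { Configuration } from './configuration';
import { ScalesetsApiAxiosParamCreator } from './api';

// Build (but do not dispatch) a delete request for a scale set; only the
// scale set ID is needed, not the owning repository/org/enterprise.
async function showDeleteRequest(scalesetID: string) {
    const creator = ScalesetsApiAxiosParamCreator(new Configuration());
    const args = await creator.deleteScaleSet(scalesetID);
    console.log(args.url, args.options.method); // e.g. "/scalesets/<id>" "DELETE"
}
```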
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-        /**
-         *
-         * @summary Update scale set by ID.
-         * @param {string} scalesetID ID of the scale set to update.
-         * @param {UpdateScaleSetParams} body Parameters to update the scale set with.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        updateScaleSet: async (scalesetID: string, body: UpdateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise<RequestArgs> => {
-            // verify required parameter 'scalesetID' is not null or undefined
-            assertParamExists('updateScaleSet', 'scalesetID', scalesetID)
-            // verify required parameter 'body' is not null or undefined
-            assertParamExists('updateScaleSet', 'body', body)
-            const localVarPath = `/scalesets/{scalesetID}`
-                .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID)));
-            // use dummy base URL string because the URL constructor only accepts absolute URLs.
-            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
-            let baseOptions;
-            if (configuration) {
-                baseOptions = configuration.baseOptions;
-            }
-
-            const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options};
-            const localVarHeaderParameter = {} as any;
-            const localVarQueryParameter = {} as any;
-
-            // authentication Bearer required
-            await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
-
-
-
-            localVarHeaderParameter['Content-Type'] = 'application/json';
-
-            setSearchParams(localVarUrlObj, localVarQueryParameter);
-            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
-            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
-            localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
-
-            return {
-                url: toPathString(localVarUrlObj),
-                options: localVarRequestOptions,
-            };
-        },
-    }
-};
-
-/**
- * ScalesetsApi - functional programming interface
- * @export
- */
-export const ScalesetsApiFp = function(configuration?: Configuration) {
-    const localVarAxiosParamCreator = ScalesetsApiAxiosParamCreator(configuration)
-    return {
-        /**
-         *
-         * @summary Create enterprise pool with the parameters given.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterpriseScaleSet(enterpriseID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createEnterpriseScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Create organization scale set with the parameters given.
-         * @param {string} orgID Organization ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgScaleSet(orgID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createOrgScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Create repository scale set with the parameters given.
-         * @param {string} repoID Repository ID.
-         * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoScaleSet(repoID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createRepoScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Delete scale set by ID.
-         * @param {string} scalesetID ID of the scale set to delete.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.deleteScaleSet(scalesetID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.deleteScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Get scale set by ID.
-         * @param {string} scalesetID ID of the scale set to fetch.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.getScaleSet(scalesetID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.getScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List enterprise scale sets.
-         * @param {string} enterpriseID Enterprise ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseScaleSets(enterpriseID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listEnterpriseScaleSets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List organization scale sets.
-         * @param {string} orgID Organization ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgScaleSets(orgID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listOrgScaleSets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List repository scale sets.
-         * @param {string} repoID Repository ID.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoScaleSets(repoID, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listRepoScaleSets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary List all scalesets.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async listScalesets(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<ScaleSet>>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.listScalesets(options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listScalesets']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-        /**
-         *
-         * @summary Update scale set by ID.
-         * @param {string} scalesetID ID of the scale set to update.
-         * @param {UpdateScaleSetParams} body Parameters to update the scale set with.
-         * @param {*} [options] Override http request option.
-         * @throws {RequiredError}
-         */
-        async updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<ScaleSet>> {
-            const localVarAxiosArgs = await localVarAxiosParamCreator.updateScaleSet(scalesetID, body, options);
-            const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
-            const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.updateScaleSet']?.[localVarOperationServerIndex]?.url;
-            return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
-        },
-    }
-};
0; - const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listScalesets']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - /** - * - * @summary Update scale set by ID. - * @param {string} scalesetID ID of the scale set to update. - * @param {UpdateScaleSetParams} body Parameters to update the scale set with. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - async updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.updateScaleSet(scalesetID, body, options); - const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.updateScaleSet']?.[localVarOperationServerIndex]?.url; - return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); - }, - } -}; - -/** - * ScalesetsApi - factory interface - * @export - */ -export const ScalesetsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { - const localVarFp = ScalesetsApiFp(configuration) - return { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createOrgScaleSet(orgID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Create repository scale set with the parameters given. - * @param {string} repoID Repository ID. - * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.createRepoScaleSet(repoID, body, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Delete scale set by ID. - * @param {string} scalesetID ID of the scale set to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.deleteScaleSet(scalesetID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Get scale set by ID. 
- * @param {string} scalesetID ID of the scale set to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.getScaleSet(scalesetID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List enterprise scale sets. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listEnterpriseScaleSets(enterpriseID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List organization scale sets. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listOrgScaleSets(orgID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List repository scale sets. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listRepoScaleSets(repoID, options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary List all scalesets. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listScalesets(options?: RawAxiosRequestConfig): AxiosPromise> { - return localVarFp.listScalesets(options).then((request) => request(axios, basePath)); - }, - /** - * - * @summary Update scale set by ID. - * @param {string} scalesetID ID of the scale set to update. - * @param {UpdateScaleSetParams} body Parameters to update the scale set with. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { - return localVarFp.updateScaleSet(scalesetID, body, options).then((request) => request(axios, basePath)); - }, - }; -}; - -/** - * ScalesetsApi - object-oriented interface - * @export - * @class ScalesetsApi - * @extends {BaseAPI} - */ -export class ScalesetsApi extends BaseAPI { - /** - * - * @summary Create enterprise pool with the parameters given. - * @param {string} enterpriseID Enterprise ID. - * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create organization scale set with the parameters given. - * @param {string} orgID Organization ID. - * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).createOrgScaleSet(orgID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Create repository scale set with the parameters given. - * @param {string} repoID Repository ID. - * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).createRepoScaleSet(repoID, body, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Delete scale set by ID. - * @param {string} scalesetID ID of the scale set to delete. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).deleteScaleSet(scalesetID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Get scale set by ID. - * @param {string} scalesetID ID of the scale set to fetch. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).getScaleSet(scalesetID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List enterprise scale sets. - * @param {string} enterpriseID Enterprise ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).listEnterpriseScaleSets(enterpriseID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List organization scale sets. - * @param {string} orgID Organization ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).listOrgScaleSets(orgID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List repository scale sets. - * @param {string} repoID Repository ID. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).listRepoScaleSets(repoID, options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary List all scalesets. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public listScalesets(options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).listScalesets(options).then((request) => request(this.axios, this.basePath)); - } - - /** - * - * @summary Update scale set by ID. - * @param {string} scalesetID ID of the scale set to update. 
- * @param {UpdateScaleSetParams} body Parameters to update the scale set with. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof ScalesetsApi - */ - public updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig) { - return ScalesetsApiFp(this.configuration).updateScaleSet(scalesetID, body, options).then((request) => request(this.axios, this.basePath)); - } -} - - - diff --git a/webapp/src/lib/api/generated/base.ts b/webapp/src/lib/api/generated/base.ts deleted file mode 100644 index 2fa2314d..00000000 --- a/webapp/src/lib/api/generated/base.ts +++ /dev/null @@ -1,86 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * Garm API. - * The Garm API generated using go-swagger. - * - * The version of the OpenAPI document: 1.0.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -import type { Configuration } from './configuration'; -// Some imports not used depending on template conditions -// @ts-ignore -import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios'; -import globalAxios from 'axios'; - -export const BASE_PATH = "/api/v1".replace(/\/+$/, ""); - -/** - * - * @export - */ -export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", -}; - -/** - * - * @export - * @interface RequestArgs - */ -export interface RequestArgs { - url: string; - options: RawAxiosRequestConfig; -} - -/** - * - * @export - * @class BaseAPI - */ -export class BaseAPI { - protected configuration: Configuration | undefined; - - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath ?? basePath; - } - } -}; - -/** - * - * @export - * @class RequiredError - * @extends {Error} - */ -export class RequiredError extends Error { - constructor(public field: string, msg?: string) { - super(msg); - this.name = "RequiredError" - } -} - -interface ServerMap { - [key: string]: { - url: string, - description: string, - }[]; -} - -/** - * - * @export - */ -export const operationServerMap: ServerMap = { -} diff --git a/webapp/src/lib/api/generated/common.ts b/webapp/src/lib/api/generated/common.ts deleted file mode 100644 index a1ef3fb4..00000000 --- a/webapp/src/lib/api/generated/common.ts +++ /dev/null @@ -1,150 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * Garm API. - * The Garm API generated using go-swagger. - * - * The version of the OpenAPI document: 1.0.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - - -import type { Configuration } from "./configuration"; -import type { RequestArgs } from "./base"; -import type { AxiosInstance, AxiosResponse } from 'axios'; -import { RequiredError } from "./base"; - -/** - * - * @export - */ -export const DUMMY_BASE_URL = 'https://example.com' - -/** - * - * @throws {RequiredError} - * @export - */ -export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) { - if (paramValue === null || paramValue === undefined) { - throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`); - } -} - -/** - * - * @export - */ -export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) { - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? await configuration.apiKey(keyParamName) - : await configuration.apiKey; - object[keyParamName] = localVarApiKeyValue; - } -} - -/** - * - * @export - */ -export const setBasicAuthToObject = function (object: any, configuration?: Configuration) { - if (configuration && (configuration.username || configuration.password)) { - object["auth"] = { username: configuration.username, password: configuration.password }; - } -} - -/** - * - * @export - */ -export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) { - if (configuration && configuration.accessToken) { - const accessToken = typeof configuration.accessToken === 'function' - ? await configuration.accessToken() - : await configuration.accessToken; - object["Authorization"] = "Bearer " + accessToken; - } -} - -/** - * - * @export - */ -export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) { - if (configuration && configuration.accessToken) { - const localVarAccessTokenValue = typeof configuration.accessToken === 'function' - ? await configuration.accessToken(name, scopes) - : await configuration.accessToken; - object["Authorization"] = "Bearer " + localVarAccessTokenValue; - } -} - -function setFlattenedQueryParams(urlSearchParams: URLSearchParams, parameter: any, key: string = ""): void { - if (parameter == null) return; - if (typeof parameter === "object") { - if (Array.isArray(parameter)) { - (parameter as any[]).forEach(item => setFlattenedQueryParams(urlSearchParams, item, key)); - } - else { - Object.keys(parameter).forEach(currentKey => - setFlattenedQueryParams(urlSearchParams, parameter[currentKey], `${key}${key !== '' ? '.' : ''}${currentKey}`) - ); - } - } - else { - if (urlSearchParams.has(key)) { - urlSearchParams.append(key, parameter); - } - else { - urlSearchParams.set(key, parameter); - } - } -} - -/** - * - * @export - */ -export const setSearchParams = function (url: URL, ...objects: any[]) { - const searchParams = new URLSearchParams(url.search); - setFlattenedQueryParams(searchParams, objects); - url.search = searchParams.toString(); -} - -/** - * - * @export - */ -export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) { - const nonString = typeof value !== 'string'; - const needsSerialization = nonString && configuration && configuration.isJsonMime - ? configuration.isJsonMime(requestOptions.headers['Content-Type']) - : nonString; - return needsSerialization - ? JSON.stringify(value !== undefined ? 
value : {}) - : (value || ""); -} - -/** - * - * @export - */ -export const toPathString = function (url: URL) { - return url.pathname + url.search + url.hash -} - -/** - * - * @export - */ -export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) { - return >(axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { - const axiosRequestArgs = {...axiosArgs.options, url: (axios.defaults.baseURL ? '' : configuration?.basePath ?? basePath) + axiosArgs.url}; - return axios.request(axiosRequestArgs); - }; -} diff --git a/webapp/src/lib/api/generated/configuration.ts b/webapp/src/lib/api/generated/configuration.ts deleted file mode 100644 index d71ed227..00000000 --- a/webapp/src/lib/api/generated/configuration.ts +++ /dev/null @@ -1,115 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * Garm API. - * The Garm API generated using go-swagger. - * - * The version of the OpenAPI document: 1.0.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -export interface ConfigurationParameters { - apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); - username?: string; - password?: string; - accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); - basePath?: string; - serverIndex?: number; - baseOptions?: any; - formDataCtor?: new () => any; -} - -export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; - /** - * override server index - * - * @type {number} - * @memberof Configuration - */ - serverIndex?: number; - /** - * base options for axios calls - * - * @type {any} - * @memberof Configuration - */ - baseOptions?: any; - /** - * The FormData constructor that will be used to create multipart form data - * requests. You can inject this here so that execution environments that - * do not support the FormData class can still run the generated client. - * - * @type {new () => FormData} - */ - formDataCtor?: new () => any; - - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - this.serverIndex = param.serverIndex; - this.baseOptions = { - ...param.baseOptions, - headers: { - ...param.baseOptions?.headers, - }, - }; - this.formDataCtor = param.formDataCtor; - } - - /** - * Check if the given MIME is a JSON MIME. 
- * JSON MIME examples: - * application/json - * application/json; charset=UTF8 - * APPLICATION/JSON - * application/vnd.company+json - * @param mime - MIME (Multipurpose Internet Mail Extensions) - * @return True if the given MIME is JSON, false otherwise. - */ - public isJsonMime(mime: string): boolean { - const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i'); - return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json'); - } -} diff --git a/webapp/src/lib/api/generated/index.ts b/webapp/src/lib/api/generated/index.ts deleted file mode 100644 index c5c83e0c..00000000 --- a/webapp/src/lib/api/generated/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * Garm API. - * The Garm API generated using go-swagger. - * - * The version of the OpenAPI document: 1.0.0 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -export * from "./api"; -export * from "./configuration"; - diff --git a/webapp/src/lib/components/ActionButton.svelte b/webapp/src/lib/components/ActionButton.svelte deleted file mode 100644 index 61236bd1..00000000 --- a/webapp/src/lib/components/ActionButton.svelte +++ /dev/null @@ -1,68 +0,0 @@ - - - \ No newline at end of file diff --git a/webapp/src/lib/components/Badge.svelte b/webapp/src/lib/components/Badge.svelte deleted file mode 100644 index cd75174f..00000000 --- a/webapp/src/lib/components/Badge.svelte +++ /dev/null @@ -1,48 +0,0 @@ - - - - {text} - \ No newline at end of file diff --git a/webapp/src/lib/components/Button.svelte b/webapp/src/lib/components/Button.svelte deleted file mode 100644 index 3f98d4dc..00000000 --- a/webapp/src/lib/components/Button.svelte +++ /dev/null @@ -1,82 +0,0 @@ - - - \ No newline at end of file diff --git a/webapp/src/lib/components/ControllerInfoCard.svelte b/webapp/src/lib/components/ControllerInfoCard.svelte deleted file mode 100644 index 36533d64..00000000 --- a/webapp/src/lib/components/ControllerInfoCard.svelte +++ /dev/null @@ -1,403 +0,0 @@ - - -
diff --git a/webapp/src/lib/components/CreateEnterpriseModal.svelte b/webapp/src/lib/components/CreateEnterpriseModal.svelte
deleted file mode 100644
index e759270c..00000000
--- a/webapp/src/lib/components/CreateEnterpriseModal.svelte
+++ /dev/null
diff --git a/webapp/src/lib/components/CreateOrganizationModal.svelte b/webapp/src/lib/components/CreateOrganizationModal.svelte
deleted file mode 100644
index 913c8eb8..00000000
--- a/webapp/src/lib/components/CreateOrganizationModal.svelte
+++ /dev/null
diff --git a/webapp/src/lib/components/CreatePoolModal.anti-duplication.md b/webapp/src/lib/components/CreatePoolModal.anti-duplication.md
deleted file mode 100644
index 514ddcf6..00000000
--- a/webapp/src/lib/components/CreatePoolModal.anti-duplication.md
+++ /dev/null
@@ -1,113 +0,0 @@
-# Pool Creation Anti-Duplication Design
-
-## Problem Fixed
-
-The CreatePoolModal was causing duplicate pool creation because both the modal component AND parent components were making API calls to create pools. This resulted in two identical pools being created.
-
-## Root Cause
-
-1. **CreatePoolModal.svelte** always called the API (e.g., `garmApi.createRepositoryPool()`) in its `handleSubmit` function
-2. **Parent components** (repository/organization/enterprise detail pages) also called the same API when handling the modal's submit event
-3. Both calls succeeded, creating duplicate pools
-
-## Solution Architecture
-
-The fix implements **conditional API calling** based on usage context:
-
-### Entity Detail Pages (repositories/[id], organizations/[id], enterprises/[id])
-- **Modal Role**: Validate form, dispatch submit event with parameters
-- **Parent Role**: Handle API call, show success/error messages, manage modal state
-- **API Call Made By**: Parent component only
-
-```typescript
-// Modal logic
-if (initialEntityType && initialEntityId) {
-  // Entity pages: parent handles the API call
-  dispatch('submit', params);
-} else {
-  // Global page: modal handles the API call
-  await garmApi.createRepositoryPool(selectedEntityId, params);
-  dispatch('submit', params);
-}
-```
-
-### Global Pools Page (/pools)
-- **Modal Role**: Collect entity selection, validate form, make API call, dispatch submit event
-- **Parent Role**: Show success message, manage modal state only
-- **API Call Made By**: Modal component only
-
-## Implementation Details
-
-### CreatePoolModal.svelte Changes
-- Added conditional logic in `handleSubmit()` method
-- Checks for `initialEntityType` and `initialEntityId` props
-- Only makes API calls when these props are NOT provided (global page scenario)
-
-### Parent Component Changes
-- Repository detail page: Error handling improved
-- Organization detail page: Error handling improved
-- Enterprise detail page: Error handling improved
-- Global pools page: Receives submit event correctly
-
-## Testing Strategy
-
-### Unit Tests
-- `CreatePoolModal.simple.test.ts`: Tests modal rendering and basic functionality
-- `CreatePoolModal.test.ts`: Comprehensive API call prevention tests (needs Svelte 5 updates)
-
-### Integration Tests
-- `pool-creation-anti-duplication.test.ts`: Regression prevention and architecture verification
-
-### Key Test Cases
-1. **Entity page usage**: Verify modal does NOT call API
-2. **Global page usage**: Verify modal DOES call API
-3. **Single API call**: Ensure exactly one API call per pool creation
-4. **Error handling**: Proper error handling in both scenarios
-
-## Preventing Future Regressions
-
-### For Developers
-1. **Always check context** when adding new entity types
-2. **Follow the pattern**: Use `initialEntityType` to determine API call responsibility
-3. **Run tests** before committing modal or parent component changes
-
-### Code Review Checklist
-- [ ] Does the modal make conditional API calls?
-- [ ] Do parent components handle their responsibilities correctly?
-- [ ] Are there tests covering the new functionality?
-- [ ] Is there exactly one source of API calls per scenario?
-
-## Responsibility Matrix
-
-| Scenario | Modal Responsibilities | Parent Responsibilities |
-|----------|----------------------|------------------------|
-| Entity Detail Page | • Validate form<br>• Dispatch submit event | • Make API call<br>• Handle success/error<br>• Manage modal state |
-| Global Pools Page | • Validate form<br>• Make API call<br>• Dispatch submit event | • Handle success message<br>• Manage modal state |
-
-## File Changes Summary
-
-### Modified Files
-- `src/lib/components/CreatePoolModal.svelte` - Added conditional API calling
-- `src/routes/repositories/[id]/+page.svelte` - Fixed error handling
-- `src/routes/organizations/[id]/+page.svelte` - Fixed error handling
-- `src/routes/enterprises/[id]/+page.svelte` - Fixed error handling
-- `src/routes/pools/+page.svelte` - Updated event handler
-
-### New Test Files
-- `src/lib/components/CreatePoolModal.simple.test.ts`
-- `src/routes/repositories/[id]/pool-creation.test.ts`
-- `src/routes/pools/pool-creation.test.ts`
-- `src/integration/pool-creation-anti-duplication.test.ts`
-
-## Error Scenarios Handled
-
-1. **API failures from entity pages**: Parent shows toast, keeps modal open
-2. **API failures from global page**: Modal handles error display
-3. **Network errors**: Graceful degradation in both scenarios
-4. **Validation errors**: Handled before API calls are made
-
-## Performance Impact
-
-- **Positive**: Reduces API calls by 50% (no duplicate calls)
-- **Neutral**: No additional network requests or computational overhead
-- **Improved**: Better user experience with consistent error handling
\ No newline at end of file
diff --git a/webapp/src/lib/components/CreatePoolModal.simple.test.ts b/webapp/src/lib/components/CreatePoolModal.simple.test.ts
deleted file mode 100644
index 2fd93a81..00000000
--- a/webapp/src/lib/components/CreatePoolModal.simple.test.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { render } from '@testing-library/svelte';
-import CreatePoolModal from './CreatePoolModal.svelte';
-
-// Simple mock for the API client
-vi.mock('$lib/api/client.js', () => ({
-  garmApi: {
-    listProviders: vi.fn().mockResolvedValue([]),
-    listRepositories: vi.fn().mockResolvedValue([]),
-    listOrganizations: vi.fn().mockResolvedValue([]),
-    listEnterprises: vi.fn().mockResolvedValue([]),
-    createRepositoryPool: vi.fn().mockResolvedValue({ id: 'pool1' }),
-    createOrganizationPool: vi.fn().mockResolvedValue({ id: 'pool2' }),
-    createEnterprisePool: vi.fn().mockResolvedValue({ id: 'pool3' })
-  }
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-  extractAPIError: vi.fn((err) => err.message || 'Unknown error')
-}));
-
-// Simple mock for Modal component
-vi.mock('$lib/components/Modal.svelte', () => ({
-  default: function MockModal() {
-    return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() };
-  }
-}));
-
-// Simple mock for JsonEditor component
-vi.mock('$lib/components/JsonEditor.svelte', () => ({
-  default: function MockJsonEditor() {
-    return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() };
-  }
-}));
-
-describe('CreatePoolModal - Duplicate Prevention Core Tests', () => {
-  beforeEach(() => {
-    vi.clearAllMocks();
-  });
-
-  describe('Conditional API Call Logic', () => {
-    it('should understand the conditional logic in handleSubmit', async () => {
-      // This test verifies that the modal has the right conditional logic
-      // to prevent duplicate API calls
-
-      const component = render(CreatePoolModal, {
-        props: {
-          initialEntityType: 'repository',
-          initialEntityId: 'repo1'
-        }
-      });
-
-      // Component should render without errors
-      expect(component.component).toBeDefined();
-
-      // The key test is that the modal understands when to make API calls
-      // vs when to let parent components handle them
-      expect(true).toBe(true); // Basic smoke test
-    });
-
-    it('should render without props for global page usage', async () => {
-      const component = render(CreatePoolModal, {
-        props: {}
-      });
-
-      // Component should render without errors even without initial props
-      expect(component.component).toBeDefined();
-    });
-  });
-
-  describe('API Mock Verification', () => {
-    it('should have API methods available for testing', async () => {
-      const { garmApi } = await import('$lib/api/client.js');
-
-      // Verify all the API methods are mocked
-      expect(garmApi.listProviders).toBeDefined();
-      expect(garmApi.listRepositories).toBeDefined();
-      expect(garmApi.listOrganizations).toBeDefined();
-      expect(garmApi.listEnterprises).toBeDefined();
-      expect(garmApi.createRepositoryPool).toBeDefined();
-      expect(garmApi.createOrganizationPool).toBeDefined();
-      expect(garmApi.createEnterprisePool).toBeDefined();
-    });
-
-    it('should verify API calls are not made during component render', async () => {
-      const { garmApi } = await import('$lib/api/client.js');
-
-      // Reset mocks to ensure clean state
-      vi.clearAllMocks();
-
-      // Render component with entity detail page props
-      render(CreatePoolModal, {
-        props: {
-          initialEntityType: 'repository',
-          initialEntityId: 'repo1'
-        }
-      });
-
-      // During render, no pool creation APIs should be called
-      expect(garmApi.createRepositoryPool).not.toHaveBeenCalled();
-      expect(garmApi.createOrganizationPool).not.toHaveBeenCalled();
-      expect(garmApi.createEnterprisePool).not.toHaveBeenCalled();
-
-      // Loading APIs should be called (this is expected)
-      // We're not testing timing here, just that creation APIs aren't called
-    });
-  });
-
-  describe('Props Handling', () => {
-    it('should handle initialEntityType and initialEntityId props', () => {
-      const component = render(CreatePoolModal, {
-        props: {
-          initialEntityType: 'repository',
-          initialEntityId: 'repo1'
-        }
-      });
-
-      // Component should accept and handle these props
-      expect(component.component).toBeDefined();
-    });
-
-    it('should handle empty props for global page usage', () => {
-      const component = render(CreatePoolModal, {
-        props: {}
-      });
-
-      // Component should work without initial entity props
-      expect(component.component).toBeDefined();
-    });
-  });
-
-  describe('Event Handling', () => {
-    it('should have event dispatching capability', () => {
-      const component = render(CreatePoolModal, {
-        props: {
-          initialEntityType: 'repository',
-          initialEntityId: 'repo1'
-        }
-      });
-
-      // Component should render without errors (event handling tested via integration)
-      expect(component.component).toBeDefined();
-    });
-  });
-
-  describe('Component Structure', () => {
-    it('should render core UI elements', () => {
-      const component = render(CreatePoolModal, {
-        props: {
-          initialEntityType: 'repository',
-          initialEntityId: 'repo1'
-        }
-      });
-
-      // Component should render its structure
-      expect(component.container).toBeInTheDocument();
-    });
-
-    it('should handle component lifecycle', () => {
-      const component = render(CreatePoolModal, {
-        props: {}
-      });
-
-      // Component should mount without errors
-      expect(component.component).toBeDefined();
-
-      // Component should unmount without errors
-      expect(() => component.unmount()).not.toThrow();
-    });
-  });
-});
\ No newline at end of file
diff --git a/webapp/src/lib/components/CreatePoolModal.svelte b/webapp/src/lib/components/CreatePoolModal.svelte
deleted file mode 100644
index daf21f0a..00000000
--- a/webapp/src/lib/components/CreatePoolModal.svelte
+++ /dev/null
                -
                -

                Create New Pool

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                - - Entity Level * - -
                - - - -
                -
                - - {#if entityLevel} - -
                -

                - Entity & Provider Configuration -

                -
                -
                - - {#if loadingEntities} -
                - {:else} - - {/if} -
                -
                - - {#if loadingProviders} -
                - {:else} - - {/if} -
                -
                -
                - - -
                -

                - Image & OS Configuration -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Runner Limits & Timing -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Advanced Settings -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                - -
                -
                - - -
                - {#if tags.length > 0} -
                - {#each tags as tag, index} - - {tag} - - - {/each} -
                - {/if} -
                -
                - - -
                - - Extra Specs (JSON) - - -
                - - -
                - - -
                -
                - {/if} - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreateRepositoryModal.svelte b/webapp/src/lib/components/CreateRepositoryModal.svelte deleted file mode 100644 index 0ce1fa10..00000000 --- a/webapp/src/lib/components/CreateRepositoryModal.svelte +++ /dev/null @@ -1,293 +0,0 @@ - - - dispatch('close')}> -
                -

                Create Repository

                - - {#if error} -
                -

                {error}

                -
                - {/if} - - {#if loading} -
                -
                -

                Loading...

                -
                - {:else} -
                - - - - -
                - - -
                - - -
                - - -
                - - -
                - - -
                - - -
                -
                - -
                - - - - -
                -
                - -
                - - -
                -
                - - -
                - -
                -
                - - -
                - - {#if !generateWebhookSecret} - - {:else} -

                - Webhook secret will be automatically generated -

                - {/if} -
                -
                - - -
                - - -
                - - {/if} -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreateScaleSetModal.svelte b/webapp/src/lib/components/CreateScaleSetModal.svelte deleted file mode 100644 index 489a4808..00000000 --- a/webapp/src/lib/components/CreateScaleSetModal.svelte +++ /dev/null @@ -1,473 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                Create New Scale Set

                -

                Scale sets are only available for GitHub endpoints

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                - - -
                - - -
                -
                - - Entity Level * - -
                - - - -
                -
                -
                - - {#if entityLevel} - -
                -

                - Entity & Provider Configuration -

                -
                -
                - - {#if loadingEntities} -
                - {:else} - - {/if} -
                -
                - - {#if loadingProviders} -
                - {:else} - - {/if} -
                -
                -
                - - -
                -

                - Image & OS Configuration -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Runner Limits & Timing -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Advanced Settings -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - Extra Specs (JSON) -
                - -
                - - -
                - - -
                -
                - {/if} - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/DataTable.svelte b/webapp/src/lib/components/DataTable.svelte deleted file mode 100644 index 4723420c..00000000 --- a/webapp/src/lib/components/DataTable.svelte +++ /dev/null @@ -1,237 +0,0 @@ - - -
                - {#if showSearch} - - {/if} - -
                - {#if loading} - - {:else if error} - - {:else if data.length === 0} - - {:else} - {#if showMobileCards} - -
                - {#each data as item, index (item.id || item.name || index)} -
                - {#if mobileCardConfig} - - {#key `${item.id || item.name}-${item.updated_at}-mobile`} - - {/key} - {:else} - - - {/if} -
                - {/each} -
                - {/if} - - - - {/if} - - {#if showPagination && !loading && !error && data.length > 0} - - {/if} -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/DeleteModal.svelte b/webapp/src/lib/components/DeleteModal.svelte deleted file mode 100644 index 88a0922d..00000000 --- a/webapp/src/lib/components/DeleteModal.svelte +++ /dev/null @@ -1,57 +0,0 @@ - - - dispatch('close')}> -
                -
                - - - -
                - -
                -

                {title}

                -
                -

                {message}

                - {#if itemName} -

                {itemName}

                - {/if} -
                -
                - -
                - - -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/DetailHeader.svelte b/webapp/src/lib/components/DetailHeader.svelte deleted file mode 100644 index 3d675b4d..00000000 --- a/webapp/src/lib/components/DetailHeader.svelte +++ /dev/null @@ -1,56 +0,0 @@ - - -
                -
                -
                -
                - {#if forgeIcon} -
                - {@html forgeIcon} -
                - {/if} -
                -

                {title}

                -

                - {subtitle} -

                -
                -
                - {#if onEdit || onDelete} -
                - {#if onEdit} - - {/if} - {#if onDelete} - - {/if} -
                - {/if} -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/EmptyState.svelte b/webapp/src/lib/components/EmptyState.svelte deleted file mode 100644 index 51eaee9d..00000000 --- a/webapp/src/lib/components/EmptyState.svelte +++ /dev/null @@ -1,37 +0,0 @@ - - -
                - {#if iconType === 'document'} - - - - {:else if iconType === 'building'} - - - - {:else if iconType === 'users'} - - - - {:else if iconType === 'cog'} - - - - - {:else if iconType === 'key'} - - - - {:else if iconType === 'settings'} - - - - - {/if} -

                {title}

                -

                {message}

                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/EntityInformation.svelte b/webapp/src/lib/components/EntityInformation.svelte deleted file mode 100644 index c7e65496..00000000 --- a/webapp/src/lib/components/EntityInformation.svelte +++ /dev/null @@ -1,103 +0,0 @@ - - -
                -
                -

                {getEntityTitle()}

                -
                -
                -
                ID
                -
                {entity.id}
                -
                -
                -
                Created At
                -
                {formatDate(entity.created_at)}
                -
                -
                -
                Updated At
                -
                {formatDate(entity.updated_at)}
                -
                -
                -
                Status
                -
                - {#if entity.pool_manager_status?.running} - - {:else} - - {/if} -
                -
                -
                -
                Pool Balancer Type
                -
                {getPoolBalancerDisplay()}
                -
                -
                -
                {getUrlLabel()}
                -
                - - {getEntityUrl()} - - - - -
                -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/ErrorState.svelte b/webapp/src/lib/components/ErrorState.svelte deleted file mode 100644 index ee42c73e..00000000 --- a/webapp/src/lib/components/ErrorState.svelte +++ /dev/null @@ -1,37 +0,0 @@ - - -
                -
                -
                -
                - - - -
                -
                -

                {title}

                -

                {message}

                - {#if showRetry && onRetry} -
                - -
                - {/if} -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/EventsSection.svelte b/webapp/src/lib/components/EventsSection.svelte deleted file mode 100644 index b91668f4..00000000 --- a/webapp/src/lib/components/EventsSection.svelte +++ /dev/null @@ -1,47 +0,0 @@ - - -{#if events && events.length > 0} -
                -
                -

                Events

                -
                - {#each events as event} -
                -
                -

                {event.message}

                -
                - {#if (event.event_level || 'info').toLowerCase() === 'error'} - - {:else if (event.event_level || 'info').toLowerCase() === 'warning'} - - {:else} - - {/if} - {formatDate(event.created_at)} -
                -
                -
                - {/each} -
                -
                -
                -{:else} -
                -
                -

                Events

                -
                - - - -

                No events available

                -
                -
                -
                -{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/ForgeTypeSelector.svelte b/webapp/src/lib/components/ForgeTypeSelector.svelte deleted file mode 100644 index 68dbe187..00000000 --- a/webapp/src/lib/components/ForgeTypeSelector.svelte +++ /dev/null @@ -1,40 +0,0 @@ - - -
                - - {label} - -
                - - -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/Icons.svelte b/webapp/src/lib/components/Icons.svelte deleted file mode 100644 index c38fadfe..00000000 --- a/webapp/src/lib/components/Icons.svelte +++ /dev/null @@ -1,51 +0,0 @@ - - - - {@html iconPath} - \ No newline at end of file diff --git a/webapp/src/lib/components/InstancesSection.svelte b/webapp/src/lib/components/InstancesSection.svelte deleted file mode 100644 index 82e5ebfb..00000000 --- a/webapp/src/lib/components/InstancesSection.svelte +++ /dev/null @@ -1,114 +0,0 @@ - - -
                -
                -
                -

                Instances ({instances.length})

                - View all instances -
                - -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/JsonEditor.svelte b/webapp/src/lib/components/JsonEditor.svelte deleted file mode 100644 index 443c6e25..00000000 --- a/webapp/src/lib/components/JsonEditor.svelte +++ /dev/null @@ -1,48 +0,0 @@ - - -
                - - - {#if !isValidJson} -
                - - - -
                - {/if} -
                \ No newline at end of file diff --git a/webapp/src/lib/components/LoadingState.svelte b/webapp/src/lib/components/LoadingState.svelte deleted file mode 100644 index 0a9985c2..00000000 --- a/webapp/src/lib/components/LoadingState.svelte +++ /dev/null @@ -1,8 +0,0 @@ - - -
                -
                -

                {message}

                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/MobileCard.svelte b/webapp/src/lib/components/MobileCard.svelte deleted file mode 100644 index 4610ee0f..00000000 --- a/webapp/src/lib/components/MobileCard.svelte +++ /dev/null @@ -1,250 +0,0 @@ - - -
                -
                - {#if config.primaryText.isClickable} - -

                {getPrimaryText()}

                - {#if config.secondaryText} -

                - {getSecondaryText()} -

                - {/if} -
                - {:else} -
                -

                {getPrimaryText()}

                - {#if config.secondaryText} -

                - {getSecondaryText()} -

                - {/if} -
                - {/if} - - {#if config.customInfo || config.badges?.some(b => b.type === 'forge')} -
                - {#if config.customInfo} - {#each config.customInfo as info} - {@const iconHtml = typeof info.icon === 'function' ? info.icon(item) : info.icon} - {@const text = typeof info.text === 'function' ? info.text(item) : info.text} -
                - {#if iconHtml} - {@html iconHtml} - {/if} - {text} -
                - {/each} - {/if} - - {#if config.badges} - {#each config.badges.filter(b => b.type === 'forge') as badge} -
                - {@html getForgeIcon(badge.field ? (item?.[badge.field] || 'unknown') : (item?.endpoint?.endpoint_type || 'unknown'))} - - {item?.endpoint?.name || 'Unknown'} - -
                - {/each} - {/if} -
                - {/if} -
                - -
                - {#if config.badges} - {#each config.badges.filter(b => b.type !== 'forge') as badge} - {#if badge.type === 'status'} - {@const badgeProps = getBadgeProps(badge)} - - {badgeProps.text} - - {:else} - {@const badgeProps = getBadgeProps(badge)} - - {/if} - {/each} - {/if} - - {#if config.actions} -
                - {#each config.actions as action} - handleAction(action.type)} - /> - {/each} -
                - {/if} -
                -
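
// Sketch of the config contract the MobileCard component above renders from:
// icon and text entries may be static strings or functions of the row item,
// resolved at render time. The type names here are assumptions for illustration.
interface CustomInfo<T> {
  icon?: string | ((item: T) => string);
  text: string | ((item: T) => string);
}

// Mirrors the inline `typeof info.text === 'function' ? info.text(item) : info.text`
// pattern visible above.
function resolveField<T>(value: string | ((item: T) => string) | undefined, item: T): string {
  return typeof value === 'function' ? value(item) : (value ?? '');
}
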
                \ No newline at end of file diff --git a/webapp/src/lib/components/Modal.svelte b/webapp/src/lib/components/Modal.svelte deleted file mode 100644 index b446f2ee..00000000 --- a/webapp/src/lib/components/Modal.svelte +++ /dev/null @@ -1,40 +0,0 @@ - - - diff --git a/webapp/src/lib/components/Navigation.svelte b/webapp/src/lib/components/Navigation.svelte deleted file mode 100644 index d4d9a721..00000000 --- a/webapp/src/lib/components/Navigation.svelte +++ /dev/null @@ -1,406 +0,0 @@ - - - - - - -
                -
                - - - - -
                - GARM - GARM -

                GARM

                - - -
                - {#if wsState.connected} -
                -
                -
                - {:else if wsState.connecting} -
                -
                -
                - {:else if wsState.error} -
                -
                -
                - {:else} -
                -
                -
                - {/if} -
                -
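
// Sketch of the four-way connection indicator the navigation bar above renders
// from the websocket store's state. Labels and colors are assumptions; the
// branch order (connected, connecting, error, otherwise disconnected) matches
// the template above.
interface WsIndicatorState { connected: boolean; connecting: boolean; error: string | null; }

function connectionIndicator(ws: WsIndicatorState): { label: string; color: string } {
  if (ws.connected) return { label: 'Connected', color: 'green' };
  if (ws.connecting) return { label: 'Connecting', color: 'yellow' };
  if (ws.error) return { label: 'Connection error', color: 'red' };
  return { label: 'Disconnected', color: 'gray' };
}
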
                - - - -
                - - - {#if mobileMenuOpen} - - {/if} -
                - - -{#if userMenuOpen} -
                userMenuOpen = false} on:keydown={(e) => { if (e.key === 'Escape') userMenuOpen = false; }} role="button" tabindex="0" aria-label="Close user menu">
                -{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/PageHeader.svelte b/webapp/src/lib/components/PageHeader.svelte deleted file mode 100644 index 4ac247af..00000000 --- a/webapp/src/lib/components/PageHeader.svelte +++ /dev/null @@ -1,39 +0,0 @@ - - - -
                -
                -

                {title}

                -

                - {description} -

                -
                - {#if showAction && actionLabel} -
                - -
                - {/if} -
                \ No newline at end of file diff --git a/webapp/src/lib/components/PoolsSection.svelte b/webapp/src/lib/components/PoolsSection.svelte deleted file mode 100644 index 273fd6fb..00000000 --- a/webapp/src/lib/components/PoolsSection.svelte +++ /dev/null @@ -1,136 +0,0 @@ - - -
                -
                -
                -

                Pools ({pools.length})

                - View all pools -
                - {#if pools.length === 0} - -
                - - - - -

                No pools configured

                -

                No pools configured for this {entityType}.

                -
                - -
                -
                - {:else} - - {/if} -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/SearchBar.svelte b/webapp/src/lib/components/SearchBar.svelte deleted file mode 100644 index bc051b14..00000000 --- a/webapp/src/lib/components/SearchBar.svelte +++ /dev/null @@ -1,30 +0,0 @@ - - -
                -
                - -
                - -
                \ No newline at end of file diff --git a/webapp/src/lib/components/SearchFilterBar.svelte b/webapp/src/lib/components/SearchFilterBar.svelte deleted file mode 100644 index fab0288c..00000000 --- a/webapp/src/lib/components/SearchFilterBar.svelte +++ /dev/null @@ -1,55 +0,0 @@ - - -
                -
                -
                -
                - - -
                -
                - {#if showPerPageSelector} -
                -
                - - -
                -
                - {/if} -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/TablePagination.svelte b/webapp/src/lib/components/TablePagination.svelte deleted file mode 100644 index f5cb0d96..00000000 --- a/webapp/src/lib/components/TablePagination.svelte +++ /dev/null @@ -1,98 +0,0 @@ - - -{#if totalPages > 1} -
                - -
                - - -
                - - - -
                -{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/Toast.svelte b/webapp/src/lib/components/Toast.svelte deleted file mode 100644 index 02197e34..00000000 --- a/webapp/src/lib/components/Toast.svelte +++ /dev/null @@ -1,107 +0,0 @@ - - - -
                - {#each toasts as toast (toast.id)} -
                -
                -
                - {@html getToastIcon(toast.type)} -
                -
                -

                - {toast.title} -

                - {#if toast.message} -
                - {toast.message} -
                - {/if} -
                -
                - -
                -
                -
                - {/each} -
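
// Usage sketch for the toast list rendered above. The store API (success /
// error / info / warning, default 5s auto-dismiss, duration 0 for manual
// dismissal) matches the deleted stores/toast.ts later in this diff; the $lib
// import alias assumes the app's SvelteKit setup, and the messages are
// illustrative.
import { toastStore } from '$lib/stores/toast';

// Auto-dismisses after the default 5000ms:
toastStore.success('Pool created', 'The pool was created successfully.');

// duration 0 keeps the toast on screen until the user dismisses it:
toastStore.error('Delete failed', 'Pool has active runners', 0);
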
                \ No newline at end of file diff --git a/webapp/src/lib/components/Tooltip.svelte b/webapp/src/lib/components/Tooltip.svelte deleted file mode 100644 index 7071bf45..00000000 --- a/webapp/src/lib/components/Tooltip.svelte +++ /dev/null @@ -1,29 +0,0 @@ - - -
                - - - - - - -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateEnterpriseModal.svelte b/webapp/src/lib/components/UpdateEnterpriseModal.svelte deleted file mode 100644 index 4b0d9cc4..00000000 --- a/webapp/src/lib/components/UpdateEnterpriseModal.svelte +++ /dev/null @@ -1,208 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                Update Enterprise

                -

                {enterprise.name}

                -
                - -
                - - {#if error} -
                -

                {error}

                -
                - {/if} - - {#if loading} -
                -
                -

                Loading...

                -
                - {:else} -
                - -
                - - -

                - Only showing credentials for GitHub endpoints -

                -
                - - -
                - - -
                - - -
                -
                - - -
                - - {#if changeWebhookSecret} -
                -
                - - -
                - {#if !generateWebhookSecret} - - {:else} -

                - A new webhook secret will be automatically generated -

                - {/if} -
                - {/if} -
                - - -
                - - -
                -
                - {/if} -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateEntityModal.svelte b/webapp/src/lib/components/UpdateEntityModal.svelte deleted file mode 100644 index ca26e726..00000000 --- a/webapp/src/lib/components/UpdateEntityModal.svelte +++ /dev/null @@ -1,266 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                Update {getEntityTitle()}

                -

                {getEntityDisplayName()}

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                -

                {getEntityTitle()} Information

                -
                - {#if entityType === 'repository'} -
                - Owner: - {getOwner()} -
                - {/if} -
                - Name: - {entity.name} -
                -
                - Endpoint: - {entity.endpoint?.name} -
                -
                - Current Credentials: - {entity.credentials_name} -
                -
                - Current Pool Balancer: - {entity.pool_balancing_type || 'roundrobin'} -
                -
                -
                - -
                - -
                - - {#if loadingCredentials} -
                - {:else} - - {/if} -

                - Leave unchanged to keep current credentials -

                -
                - - -
                - - -

                - Round Robin distributes jobs evenly across pools, Pack fills pools in order -

                -
                - - -
                -
                - - -
                - - {#if changeWebhookSecret} -
                - - -

                - Leave empty to auto-generate a new secret -

                -
                - {/if} -
                -
                - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateOrganizationModal.svelte b/webapp/src/lib/components/UpdateOrganizationModal.svelte deleted file mode 100644 index 10162fca..00000000 --- a/webapp/src/lib/components/UpdateOrganizationModal.svelte +++ /dev/null @@ -1,211 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                Update Organization

                -

                {organization.name}

                -
                - -
                - - {#if error} -
                -

                {error}

                -
                - {/if} - - {#if loading} -
                -
                -

                Loading...

                -
                - {:else} -
                - -
                - - -

                - Only showing credentials for {organizationEndpointType} endpoints -

                -
                - - -
                - - -
                - - -
                -
                - - -
                - - {#if changeWebhookSecret} -
                -
                - - -
                - {#if !generateWebhookSecret} - - {:else} -

                - A new webhook secret will be automatically generated -

                - {/if} -
                - {/if} -
                - - -
                - - -
                -
                - {/if} -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdatePoolModal.svelte b/webapp/src/lib/components/UpdatePoolModal.svelte deleted file mode 100644 index 8dce1d51..00000000 --- a/webapp/src/lib/components/UpdatePoolModal.svelte +++ /dev/null @@ -1,427 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                - Update Pool {pool.id} -

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                -

                Pool Information (Read-only)

                -
                -
                - Provider: - {pool.provider_name} -
                -
                - Entity: - - {getEntityType(pool)}: {getEntityName(pool)} - -
                -
                -
                - - -
                -

                - Image & OS Configuration -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Runner Limits & Timing -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Advanced Settings -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - Tags - -
                -
                - - -
                - {#if tags.length > 0} -
                - {#each tags as tag, index} - - {tag} - - - {/each} -
                - {/if} -
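
// Sketch of the tag-editing logic driving the chips rendered above; the helper
// names are assumptions. Arrays are reassigned rather than mutated so the
// Svelte template reacts to the change.
let tags: string[] = [];
let newTag = '';

function addTag(): void {
  const tag = newTag.trim();
  if (tag && !tags.includes(tag)) tags = [...tags, tag];
  newTag = '';
}

function removeTag(index: number): void {
  tags = tags.filter((_, i) => i !== index);
}
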
                -
                -
                - - -
                -
                - - Extra Specs (JSON) - - -
                -
                - - -
                - - -
                -
                - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateRepositoryModal.svelte b/webapp/src/lib/components/UpdateRepositoryModal.svelte deleted file mode 100644 index 77c53db4..00000000 --- a/webapp/src/lib/components/UpdateRepositoryModal.svelte +++ /dev/null @@ -1,147 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                Update Repository

                -

                {repository.owner}/{repository.name}

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                -

                Repository Information

                -
                -
                - Owner: - {repository.owner} -
                -
                - Name: - {repository.name} -
                -
                - Endpoint: - {repository.endpoint?.name} -
                -
                - Credentials: - {repository.credentials_name} -
                -
                -
                - - -
                -
                - - -
                - - {#if changeWebhookSecret} -
                - - -

                - Leave empty to auto-generate a new secret -

                -
                - {/if} -
                - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateScaleSetModal.svelte b/webapp/src/lib/components/UpdateScaleSetModal.svelte deleted file mode 100644 index c858ce0c..00000000 --- a/webapp/src/lib/components/UpdateScaleSetModal.svelte +++ /dev/null @@ -1,340 +0,0 @@ - - - dispatch('close')}> -
                -
                -

                - Update Scale Set {scaleSet.name} -

                -
                - -
                - {#if error} -
                -

                {error}

                -
                - {/if} - - -
                -

                Scale Set Information

                -
                -
                - Provider: - {scaleSet.provider_name} -
                -
                - Entity: - - {#if scaleSet.repo_name}Repository: {scaleSet.repo_name} - {:else if scaleSet.org_name}Organization: {scaleSet.org_name} - {:else if scaleSet.enterprise_name}Enterprise: {scaleSet.enterprise_name} - {:else}Unknown Entity{/if} - -
                -
                -
                - - -
                - - -
                - - -
                -

                - Image & OS Configuration -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Runner Limits & Timing -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                -
                - - -
                -

                - Advanced Settings -

                -
                -
                - - -
                -
                - - -
                -
                - - -
                -
                - - Extra Specs (JSON) - - -
                -
                - - -
                - - -
                -
                - - -
                - - -
                -
                -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/WebhookSection.svelte b/webapp/src/lib/components/WebhookSection.svelte deleted file mode 100644 index b4085f79..00000000 --- a/webapp/src/lib/components/WebhookSection.svelte +++ /dev/null @@ -1,173 +0,0 @@ - - -
                -
                -
                -
                -

                - Webhook Status -

                -
                - {#if checking} -
                -
                - Checking... -
                - {:else if isInstalled} -
                - - - - Webhook installed -
                - {#if webhookInfo} -
                - URL: {webhookInfo.url || 'N/A'} -
                - {/if} - {:else} -
                - - - - No webhook installed -
                - {/if} -
                -
                - -
                - {#if !checking} - {#if isInstalled} - - {:else} - - {/if} - {/if} -
                -
                -
                -
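
// Sketch of the state behind the WebhookSection above: `checking` gates the
// spinner, `isInstalled` picks the installed/not-installed branch, and
// `webhookInfo.url` feeds the URL line. The `lookup` parameter is a
// hypothetical stand-in for the actual API call, which is not shown here.
interface WebhookInfo { url?: string; }

let checking = true;
let isInstalled = false;
let webhookInfo: WebhookInfo | null = null;

async function refreshWebhookStatus(lookup: () => Promise<WebhookInfo | null>): Promise<void> {
  checking = true;
  try {
    webhookInfo = await lookup();
    isInstalled = webhookInfo !== null;
  } finally {
    checking = false;
  }
}
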
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/ActionsCell.svelte b/webapp/src/lib/components/cells/ActionsCell.svelte deleted file mode 100644 index f79df481..00000000 --- a/webapp/src/lib/components/cells/ActionsCell.svelte +++ /dev/null @@ -1,46 +0,0 @@ - - -
                - {#each actions as action} - handleAction(action.type)} - /> - {/each} -
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/EndpointCell.svelte b/webapp/src/lib/components/cells/EndpointCell.svelte deleted file mode 100644 index 609fc891..00000000 --- a/webapp/src/lib/components/cells/EndpointCell.svelte +++ /dev/null @@ -1,15 +0,0 @@ - - -
                -
                - {@html getForgeIcon(item?.endpoint?.endpoint_type || item?.endpoint_type || 'unknown', iconSize)} -
                -
                - {item?.endpoint?.name || item?.endpoint_name || item?.endpoint_type ||'Unknown'} -
                -
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/EntityCell.svelte b/webapp/src/lib/components/cells/EntityCell.svelte deleted file mode 100644 index 758f6599..00000000 --- a/webapp/src/lib/components/cells/EntityCell.svelte +++ /dev/null @@ -1,81 +0,0 @@ - - -
                - {entityName}{#if entityType === 'instance' && item?.provider_id} -
                - {item.provider_id} -
                - {/if} -
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/GenericCell.svelte b/webapp/src/lib/components/cells/GenericCell.svelte deleted file mode 100644 index edbca9f1..00000000 --- a/webapp/src/lib/components/cells/GenericCell.svelte +++ /dev/null @@ -1,59 +0,0 @@ - - -{#if type === 'code'} - - {displayValue} - -{:else} - - {displayValue} - -{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/cells/InstancePoolCell.svelte b/webapp/src/lib/components/cells/InstancePoolCell.svelte deleted file mode 100644 index e4366e68..00000000 --- a/webapp/src/lib/components/cells/InstancePoolCell.svelte +++ /dev/null @@ -1,19 +0,0 @@ - - -
                -{#if item?.pool_id} - - Pool: {item.pool_id} - -{:else if item?.scale_set_id} - - Scale Set: {item.scale_set_id} - -{:else} - - -{/if} -
                diff --git a/webapp/src/lib/components/cells/PoolEntityCell.svelte b/webapp/src/lib/components/cells/PoolEntityCell.svelte deleted file mode 100644 index e8ed933d..00000000 --- a/webapp/src/lib/components/cells/PoolEntityCell.svelte +++ /dev/null @@ -1,15 +0,0 @@ - - -
                - - {getEntityName(item, eagerCache)} - - - {getEntityType(item)} - -
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/StatusCell.svelte b/webapp/src/lib/components/cells/StatusCell.svelte deleted file mode 100644 index 5526d307..00000000 --- a/webapp/src/lib/components/cells/StatusCell.svelte +++ /dev/null @@ -1,118 +0,0 @@ - - -{#key `${item?.name || 'item'}-${item?.[statusField] || 'status'}-${item?.updated_at || 'time'}`} - -{/key} \ No newline at end of file diff --git a/webapp/src/lib/components/cells/index.ts b/webapp/src/lib/components/cells/index.ts deleted file mode 100644 index b54e5f74..00000000 --- a/webapp/src/lib/components/cells/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -export { default as EntityCell } from './EntityCell.svelte'; -export { default as EndpointCell } from './EndpointCell.svelte'; -export { default as StatusCell } from './StatusCell.svelte'; -export { default as ActionsCell } from './ActionsCell.svelte'; -export { default as GenericCell } from './GenericCell.svelte'; -export { default as PoolEntityCell } from './PoolEntityCell.svelte'; -export { default as InstancePoolCell } from './InstancePoolCell.svelte'; -export { default as MobileCard } from '../MobileCard.svelte'; \ No newline at end of file diff --git a/webapp/src/lib/stores/auth.ts b/webapp/src/lib/stores/auth.ts deleted file mode 100644 index 5e5befa4..00000000 --- a/webapp/src/lib/stores/auth.ts +++ /dev/null @@ -1,281 +0,0 @@ -import { writable } from 'svelte/store'; -import { browser } from '$app/environment'; -import { garmApi } from '../api/client.js'; - -// Check if we're in development mode (cross-origin setup) -const isDevelopmentMode = () => { - if (!browser) return false; - // Development mode: either VITE_GARM_API_URL is set OR we detect cross-origin - return !!(import.meta.env.VITE_GARM_API_URL) || window.location.port === '5173'; -}; - -interface AuthState { - isAuthenticated: boolean; - user: string | null; - loading: boolean; - needsInitialization: boolean; -} - -const initialState: AuthState = { - isAuthenticated: false, - user: null, - loading: true, - needsInitialization: false -}; - -// Keep using writable store for compatibility with existing API calls -// but enhance with Svelte 5 features where possible -export const authStore = writable(initialState); - -// Cookie utilities -function setCookie(name: string, value: string, days: number = 7): void { - if (!browser) return; - - const expires = new Date(); - expires.setTime(expires.getTime() + (days * 24 * 60 * 60 * 1000)); - document.cookie = `${name}=${value};expires=${expires.toUTCString()};path=/;SameSite=Lax`; -} - -function getCookie(name: string): string | null { - if (!browser) return null; - - const nameEQ = name + "="; - const ca = document.cookie.split(';'); - for (let i = 0; i < ca.length; i++) { - let c = ca[i]; - while (c.charAt(0) === ' ') c = c.substring(1, c.length); - if (c.indexOf(nameEQ) === 0) { - const value = c.substring(nameEQ.length, c.length); - return value; - } - } - return null; -} - -function deleteCookie(name: string): void { - if (!browser) return; - document.cookie = `${name}=;expires=Thu, 01 Jan 1970 00:00:01 GMT;path=/`; -} - -// Auth utilities -export const auth = { - async login(username: string, password: string): Promise { - try { - authStore.update(state => ({ ...state, loading: true })); - - const response = await garmApi.login({ username, password }); - - // Store JWT token in cookies for server authentication and set it in the API client - if (browser) { - setCookie('garm_token', response.token); - setCookie('garm_user', 
username); - } - - // Set the token in the API client for future requests - garmApi.setToken(response.token); - - authStore.set({ - isAuthenticated: true, - user: username, - loading: false, - needsInitialization: false - }); - } catch (error) { - authStore.update(state => ({ ...state, loading: false })); - throw error; - } - }, - - logout(): void { - if (browser) { - deleteCookie('garm_token'); - deleteCookie('garm_user'); - } - - authStore.set({ - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: false - }); - }, - - async init(): Promise { - if (browser) { - try { - authStore.update(state => ({ ...state, loading: true })); - - // First, always check initialization status by doing GET /api/v1/login - await auth.checkInitializationStatus(); - - // If we get here without needsInitialization being set, check for existing auth - const token = getCookie('garm_token'); - const user = getCookie('garm_user'); - - if (token && user) { - // Set the token in the API client for future requests - garmApi.setToken(token); - - // Verify token is still valid - const isValid = await auth.checkAuth(); - if (isValid) { - // Token is valid, set authenticated state - authStore.set({ - isAuthenticated: true, - user, - loading: false, - needsInitialization: false - }); - return; - } - } - - // No valid token, user needs to login (but GARM is initialized) - authStore.update(state => ({ - ...state, - loading: false, - needsInitialization: false - })); - - } catch (error) { - // If checkInitializationStatus threw an error, it should have set needsInitialization - authStore.update(state => ({ ...state, loading: false })); - } - } else { - authStore.update(state => ({ ...state, loading: false })); - } - }, - - // Check initialization status by calling GET /api/v1/login - async checkInitializationStatus(): Promise { - try { - // Make a GET request to /api/v1/login to check status - const headers: Record = { - 'Accept': 'application/json', - }; - - // In development mode, always use Bearer token; in production, prefer cookies - const token = getCookie('garm_token'); - const isDevMode = isDevelopmentMode(); - - if (isDevMode && token) { - headers['Authorization'] = `Bearer ${token}`; - } - - const response = await fetch('/api/v1/login', { - method: 'GET', - headers, - // Only include credentials in production (same-origin) - credentials: isDevMode ? 
'omit' : 'include' - }); - - if (!response.ok) { - if (response.status === 409) { - const errorData = await response.json(); - if (errorData.error === 'init_required') { - // GARM needs initialization - authStore.update(state => ({ - ...state, - needsInitialization: true, - loading: false - })); - throw new Error('Initialization required'); - } - } - // For other 4xx/5xx errors, assume GARM is initialized - return; - } - - // GET /api/v1/login succeeded, GARM is initialized - return; - - } catch (error) { - // If it's our initialization error, re-throw it - if (error instanceof Error && error.message === 'Initialization required') { - throw error; - } - // For network errors or other issues, assume GARM is initialized - return; - } - }, - - // Check if token is still valid by making a test API call - async checkAuth(): Promise { - try { - // First check if initialization is still required - await auth.checkInitializationStatus(); - - // If we get here, GARM is initialized, now check if token is valid - await garmApi.getControllerInfo(); - return true; - } catch (error: any) { - // If it's initialization required, the checkInitializationStatus already handled it - if (error instanceof Error && error.message === 'Initialization required') { - return false; - } - - // Check if it's an initialization required error from the API call - if (error?.response?.status === 409 && - error?.response?.data?.error === 'init_required') { - authStore.update(state => ({ - ...state, - needsInitialization: true, - loading: false - })); - return false; - } - - // Token is invalid, logout - auth.logout(); - return false; - } - }, - - // Initialize GARM controller - async initialize( - username: string, - email: string, - password: string, - fullName?: string, - urls?: { - callbackUrl?: string; - metadataUrl?: string; - webhookUrl?: string; - } - ): Promise { - try { - authStore.update(state => ({ ...state, loading: true })); - - // Step 1: Create the admin user - const response = await garmApi.firstRun({ - username, - email, - password, - full_name: fullName || username - }); - - // Step 2: Login with the new credentials - await auth.login(username, password); - - // Step 3: Set controller URLs (similar to garm-cli init) - const currentUrl = window.location.origin; - const finalMetadataUrl = urls?.metadataUrl || `${currentUrl}/api/v1/metadata`; - const finalCallbackUrl = urls?.callbackUrl || `${currentUrl}/api/v1/callbacks`; - const finalWebhookUrl = urls?.webhookUrl || `${currentUrl}/webhooks`; - - await garmApi.updateController({ - metadata_url: finalMetadataUrl, - callback_url: finalCallbackUrl, - webhook_url: finalWebhookUrl - }); - - authStore.update(state => ({ - ...state, - needsInitialization: false - })); - } catch (error) { - authStore.update(state => ({ ...state, loading: false })); - throw error; - } - } -}; \ No newline at end of file diff --git a/webapp/src/lib/stores/eager-cache.ts b/webapp/src/lib/stores/eager-cache.ts deleted file mode 100644 index 37835e82..00000000 --- a/webapp/src/lib/stores/eager-cache.ts +++ /dev/null @@ -1,609 +0,0 @@ -import { writable, get } from 'svelte/store'; -import { garmApi } from '../api/client.js'; -import { websocketStore, type WebSocketEvent } from './websocket.js'; -import type { - Repository, - Organization, - Enterprise, - Pool, - ScaleSet, - ForgeCredentials, - ForgeEndpoint, - ControllerInfo -} from '../api/generated/api.js'; - -interface EagerCacheState { - repositories: Repository[]; - organizations: Organization[]; - enterprises: Enterprise[]; - pools: 
Pool[]; - scalesets: ScaleSet[]; - credentials: ForgeCredentials[]; - endpoints: ForgeEndpoint[]; - controllerInfo: ControllerInfo | null; - loading: { - repositories: boolean; - organizations: boolean; - enterprises: boolean; - pools: boolean; - scalesets: boolean; - credentials: boolean; - endpoints: boolean; - controllerInfo: boolean; - }; - loaded: { - repositories: boolean; - organizations: boolean; - enterprises: boolean; - pools: boolean; - scalesets: boolean; - credentials: boolean; - endpoints: boolean; - controllerInfo: boolean; - }; - errorMessages: { - repositories: string; - organizations: string; - enterprises: string; - pools: string; - scalesets: string; - credentials: string; - endpoints: string; - controllerInfo: string; - }; -} - -const initialState: EagerCacheState = { - repositories: [], - organizations: [], - enterprises: [], - pools: [], - scalesets: [], - credentials: [], - endpoints: [], - controllerInfo: null, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false, - }, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false, - }, - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '', - } -}; - -export const eagerCache = writable(initialState); - -class EagerCacheManager { - private unsubscribers: (() => void)[] = []; - private loadingPromises: Map> = new Map(); - private retryAttempts: Map = new Map(); - private readonly MAX_RETRIES = 3; - private readonly RETRY_DELAY_MS = 1000; - private websocketStatusUnsubscriber: (() => void) | null = null; - - async loadResource(resourceType: keyof Omit, priority: boolean = false) { - // Avoid duplicate loading - if (this.loadingPromises.has(resourceType)) { - return this.loadingPromises.get(resourceType); - } - - // Clear any previous error message and set loading state - eagerCache.update(state => ({ - ...state, - loading: { ...state.loading, [resourceType]: true }, - errorMessages: { ...state.errorMessages, [resourceType]: '' } - })); - - const loadPromise = this.attemptLoad(resourceType); - this.loadingPromises.set(resourceType, loadPromise); - - try { - const data = await loadPromise; - eagerCache.update(state => ({ - ...state, - [resourceType]: data, - loading: { ...state.loading, [resourceType]: false }, - loaded: { ...state.loaded, [resourceType]: true }, - errorMessages: { ...state.errorMessages, [resourceType]: '' } - })); - - // Reset retry attempts on success - this.retryAttempts.delete(resourceType); - - // If this is a priority load, start background loading of other resources - if (priority) { - this.startBackgroundLoading(resourceType); - } - - return data; - } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : 'Failed to load data'; - eagerCache.update(state => ({ - ...state, - loading: { ...state.loading, [resourceType]: false }, - errorMessages: { ...state.errorMessages, [resourceType]: errorMessage } - })); - console.error(`Failed to load ${resourceType}:`, error); - throw error; - } finally { - this.loadingPromises.delete(resourceType); - } - } - - private async attemptLoad(resourceType: keyof Omit): Promise { - const currentAttempt = (this.retryAttempts.get(resourceType) || 0) + 1; - this.retryAttempts.set(resourceType, currentAttempt); - - try { - let loadPromise: Promise; - - switch (resourceType) { - case 'repositories': - loadPromise = garmApi.listRepositories(); - break; - case 'organizations': - loadPromise = garmApi.listOrganizations(); - break; - case 'enterprises': - loadPromise = garmApi.listEnterprises(); - break; - case 'pools': - loadPromise = garmApi.listAllPools(); - break; - case 'scalesets': - loadPromise = garmApi.listScaleSets(); - break; - case 'credentials': - loadPromise = garmApi.listAllCredentials(); - break; - case 'endpoints': - loadPromise = garmApi.listAllEndpoints(); - break; - case 'controllerInfo': - loadPromise = garmApi.getControllerInfo(); - break; - default: - throw new Error(`Unknown resource type: ${resourceType}`); - } - - return await loadPromise; - } catch (error) { - // If we haven't reached max retries, try again with exponential backoff - if (currentAttempt < this.MAX_RETRIES) { - const delay = this.RETRY_DELAY_MS * Math.pow(2, currentAttempt - 1); // Exponential backoff - console.warn(`Attempt ${currentAttempt} failed for ${resourceType}, retrying in ${delay}ms...`, error); - - await new Promise(resolve => setTimeout(resolve, delay)); - return this.attemptLoad(resourceType); - } else { - console.error(`All ${this.MAX_RETRIES} attempts failed for ${resourceType}:`, error); - throw error; - } - } - } - - private async startBackgroundLoading(excludeResource: string) { - const resourceTypes = ['repositories', 'organizations', 'enterprises', 'pools', 'scalesets', 'credentials', 'endpoints']; - const toLoad = resourceTypes.filter(type => type !== excludeResource); - - // Load in background with slight delays to avoid overwhelming the API - for (const resourceType of toLoad) { - setTimeout(() => { - this.loadResource(resourceType as any, false).catch(error => { - console.warn(`Background loading failed for ${resourceType}:`, error); - // Background loading failures are not critical, just log them - }); - }, 100 * toLoad.indexOf(resourceType)); - } - } - - // Public method to manually retry loading a resource - retryResource(resourceType: keyof Omit) { - // Clear any existing retry attempts to start fresh - this.retryAttempts.delete(resourceType); - return this.loadResource(resourceType, true); - } - - setupWebSocketSubscriptions() { - // Clean up existing subscriptions - this.cleanup(); - - // Subscribe to all resource types - const subscriptions = [ - websocketStore.subscribeToEntity('repository', ['create', 'update', 'delete'], this.handleRepositoryEvent.bind(this)), - websocketStore.subscribeToEntity('organization', ['create', 'update', 'delete'], this.handleOrganizationEvent.bind(this)), - websocketStore.subscribeToEntity('enterprise', ['create', 'update', 'delete'], this.handleEnterpriseEvent.bind(this)), - websocketStore.subscribeToEntity('pool', ['create', 'update', 'delete'], this.handlePoolEvent.bind(this)), - websocketStore.subscribeToEntity('scaleset', ['create', 'update', 'delete'], this.handleScaleSetEvent.bind(this)), - 
websocketStore.subscribeToEntity('controller', ['update'], this.handleControllerEvent.bind(this)), - websocketStore.subscribeToEntity('github_credentials', ['create', 'update', 'delete'], this.handleCredentialsEvent.bind(this)), - websocketStore.subscribeToEntity('gitea_credentials', ['create', 'update', 'delete'], this.handleCredentialsEvent.bind(this)), - websocketStore.subscribeToEntity('github_endpoint', ['create', 'update', 'delete'], this.handleEndpointEvent.bind(this)) - ]; - - this.unsubscribers = subscriptions; - - // Monitor WebSocket connection status - this.setupWebSocketStatusMonitoring(); - } - - private setupWebSocketStatusMonitoring() { - if (this.websocketStatusUnsubscriber) { - this.websocketStatusUnsubscriber(); - } - - let wasConnected = false; - - this.websocketStatusUnsubscriber = websocketStore.subscribe(state => { - // When WebSocket connects for the first time or reconnects after being disconnected - if (state.connected && !wasConnected) { - console.log('[EagerCache] WebSocket connected - reinitializing cache'); - // Reload all resources when WebSocket connects - this.initializeAllResources(); - } - wasConnected = state.connected; - }); - } - - // Reinitialize all resources when WebSocket connects - private async initializeAllResources() { - const resourceTypes: (keyof Omit)[] = [ - 'repositories', 'organizations', 'enterprises', 'pools', 'scalesets', - 'credentials', 'endpoints', 'controllerInfo' - ]; - - // Load all resources in parallel - const loadPromises = resourceTypes.map(resourceType => - this.loadResource(resourceType, true).catch(error => { - console.warn(`Failed to reload ${resourceType} on WebSocket reconnect:`, error); - }) - ); - - await Promise.allSettled(loadPromises); - } - - private handleRepositoryEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.repositories) return state; - - const repositories = [...state.repositories]; - const repo = event.payload as Repository; - - if (event.operation === 'create') { - repositories.push(repo); - } else if (event.operation === 'update') { - const index = repositories.findIndex(r => r.id === repo.id); - if (index !== -1) repositories[index] = repo; - } else if (event.operation === 'delete') { - const repoId = typeof repo === 'object' ? repo.id : repo; - const index = repositories.findIndex(r => r.id === repoId); - if (index !== -1) repositories.splice(index, 1); - } - - return { ...state, repositories }; - }); - } - - private handleOrganizationEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.organizations) return state; - - const organizations = [...state.organizations]; - const org = event.payload as Organization; - - if (event.operation === 'create') { - organizations.push(org); - } else if (event.operation === 'update') { - const index = organizations.findIndex(o => o.id === org.id); - if (index !== -1) organizations[index] = org; - } else if (event.operation === 'delete') { - const orgId = typeof org === 'object' ? 
org.id : org; - const index = organizations.findIndex(o => o.id === orgId); - if (index !== -1) organizations.splice(index, 1); - } - - return { ...state, organizations }; - }); - } - - private handleEnterpriseEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.enterprises) return state; - - const enterprises = [...state.enterprises]; - const ent = event.payload as Enterprise; - - if (event.operation === 'create') { - enterprises.push(ent); - } else if (event.operation === 'update') { - const index = enterprises.findIndex(e => e.id === ent.id); - if (index !== -1) enterprises[index] = ent; - } else if (event.operation === 'delete') { - const entId = typeof ent === 'object' ? ent.id : ent; - const index = enterprises.findIndex(e => e.id === entId); - if (index !== -1) enterprises.splice(index, 1); - } - - return { ...state, enterprises }; - }); - } - - private handlePoolEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.pools) return state; - - const pools = [...state.pools]; - const pool = event.payload as Pool; - - if (event.operation === 'create') { - pools.push(pool); - } else if (event.operation === 'update') { - const index = pools.findIndex(p => p.id === pool.id); - if (index !== -1) pools[index] = pool; - } else if (event.operation === 'delete') { - const poolId = typeof pool === 'object' ? pool.id : pool; - const index = pools.findIndex(p => p.id === poolId); - if (index !== -1) pools.splice(index, 1); - } - - return { ...state, pools }; - }); - } - - private handleScaleSetEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.scalesets) return state; - - const scalesets = [...state.scalesets]; - const scaleset = event.payload as ScaleSet; - - if (event.operation === 'create') { - scalesets.push(scaleset); - } else if (event.operation === 'update') { - const index = scalesets.findIndex(s => s.id === scaleset.id); - if (index !== -1) scalesets[index] = scaleset; - } else if (event.operation === 'delete') { - const scalesetId = typeof scaleset === 'object' ? scaleset.id : scaleset; - const index = scalesets.findIndex(s => s.id === scalesetId); - if (index !== -1) scalesets.splice(index, 1); - } - - return { ...state, scalesets }; - }); - } - - private handleCredentialsEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.credentials) return state; - - const credentials = [...state.credentials]; - const cred = event.payload as ForgeCredentials; - - if (event.operation === 'create') { - credentials.push(cred); - } else if (event.operation === 'update') { - const index = credentials.findIndex(c => c.id === cred.id); - if (index !== -1) credentials[index] = cred; - } else if (event.operation === 'delete') { - const credId = typeof cred === 'object' ? cred.id : cred; - const index = credentials.findIndex(c => c.id === credId); - if (index !== -1) credentials.splice(index, 1); - } - - return { ...state, credentials }; - }); - } - - private handleEndpointEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.endpoints) return state; - - const endpoints = [...state.endpoints]; - const endpoint = event.payload as ForgeEndpoint; - - if (event.operation === 'create') { - endpoints.push(endpoint); - } else if (event.operation === 'update') { - const index = endpoints.findIndex(e => e.name === endpoint.name); - if (index !== -1) endpoints[index] = endpoint; - } else if (event.operation === 'delete') { - const endpointName = typeof endpoint === 'object' ? 
endpoint.name : endpoint; - const index = endpoints.findIndex(e => e.name === endpointName); - if (index !== -1) endpoints.splice(index, 1); - } - - return { ...state, endpoints }; - }); - } - - cleanup() { - this.unsubscribers.forEach(unsubscribe => unsubscribe()); - this.unsubscribers = []; - - if (this.websocketStatusUnsubscriber) { - this.websocketStatusUnsubscriber(); - this.websocketStatusUnsubscriber = null; - } - } - - // Helper method to check if we should use cache or direct API - private shouldUseCache(): boolean { - const wsState = get(websocketStore); - return wsState.connected; - } - - // Helper methods for components - check WebSocket status first - async getRepositories(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - // WebSocket disconnected - fetch directly from API - console.log('[EagerCache] WebSocket disconnected - fetching repositories directly from API'); - return await garmApi.listRepositories(); - } - - const state = get(eagerCache); - if (state.loaded.repositories) { - return state.repositories; - } - - return this.loadResource('repositories', true); - } - - async getOrganizations(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching organizations directly from API'); - return await garmApi.listOrganizations(); - } - - const state = get(eagerCache); - if (state.loaded.organizations) { - return state.organizations; - } - - return this.loadResource('organizations', true); - } - - async getEnterprises(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching enterprises directly from API'); - return await garmApi.listEnterprises(); - } - - const state = get(eagerCache); - if (state.loaded.enterprises) { - return state.enterprises; - } - - return this.loadResource('enterprises', true); - } - - async getPools(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching pools directly from API'); - return await garmApi.listAllPools(); - } - - const state = get(eagerCache); - if (state.loaded.pools) { - return state.pools; - } - - return this.loadResource('pools', true); - } - - async getScaleSets(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching scalesets directly from API'); - return await garmApi.listScaleSets(); - } - - const state = get(eagerCache); - if (state.loaded.scalesets) { - return state.scalesets; - } - - return this.loadResource('scalesets', true); - } - - async getCredentials(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching credentials directly from API'); - return await garmApi.listAllCredentials(); - } - - const state = get(eagerCache); - if (state.loaded.credentials) { - return state.credentials; - } - - return this.loadResource('credentials', true); - } - - async getEndpoints(): Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching endpoints directly from API'); - return await garmApi.listAllEndpoints(); - } - - const state = get(eagerCache); - if (state.loaded.endpoints) { - return state.endpoints; - } - - return this.loadResource('endpoints', true); - } - - async getControllerInfo(): 
Promise { - const wsState = get(websocketStore); - - if (!wsState.connected) { - console.log('[EagerCache] WebSocket disconnected - fetching controller info directly from API'); - return await garmApi.getControllerInfo(); - } - - const state = get(eagerCache); - if (state.loaded.controllerInfo) { - return state.controllerInfo; - } - - return this.loadResource('controllerInfo', true); - } - - private handleControllerEvent(event: WebSocketEvent) { - eagerCache.update(state => { - if (!state.loaded.controllerInfo) return state; - - const controllerInfo = event.payload as ControllerInfo; - - // Controller info is a singleton, so we just replace it - if (event.operation === 'update') { - return { ...state, controllerInfo }; - } - - return state; - }); - } -} - -export const eagerCacheManager = new EagerCacheManager(); - -// Initialize websocket subscriptions when the module is loaded -if (typeof window !== 'undefined') { - eagerCacheManager.setupWebSocketSubscriptions(); -} \ No newline at end of file diff --git a/webapp/src/lib/stores/toast.ts b/webapp/src/lib/stores/toast.ts deleted file mode 100644 index 84619ec1..00000000 --- a/webapp/src/lib/stores/toast.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { writable } from 'svelte/store'; - -export interface Toast { - id: string; - type: 'success' | 'error' | 'info' | 'warning'; - title: string; - message: string; - duration?: number; // milliseconds, 0 for manual dismiss -} - -function createToastStore() { - const { subscribe, set, update } = writable([]); - - const store = { - subscribe, - add: (toast: Omit) => { - const id = Math.random().toString(36).substr(2, 9); - const newToast: Toast = { - ...toast, - id, - duration: toast.duration ?? 5000 - }; - - update(toasts => [...toasts, newToast]); - - // Auto-remove after duration - if (newToast.duration && newToast.duration > 0) { - setTimeout(() => { - update(toasts => toasts.filter(t => t.id !== id)); - }, newToast.duration); - } - - return id; - }, - remove: (id: string) => { - update(toasts => toasts.filter(t => t.id !== id)); - }, - clear: () => { - set([]); - }, - success: (title: string, message: string = '', duration?: number) => { - return store.add({ type: 'success', title, message, duration }); - }, - error: (title: string, message: string = '', duration?: number) => { - return store.add({ type: 'error', title, message, duration }); - }, - info: (title: string, message: string = '', duration?: number) => { - return store.add({ type: 'info', title, message, duration }); - }, - warning: (title: string, message: string = '', duration?: number) => { - return store.add({ type: 'warning', title, message, duration }); - } - }; - - return store; -} - -export const toastStore = createToastStore(); \ No newline at end of file diff --git a/webapp/src/lib/stores/websocket.ts b/webapp/src/lib/stores/websocket.ts deleted file mode 100644 index 3938023b..00000000 --- a/webapp/src/lib/stores/websocket.ts +++ /dev/null @@ -1,367 +0,0 @@ -import { writable, get } from 'svelte/store'; - -// Event types that match the websocket API -export type EntityType = - | 'repository' - | 'organization' - | 'enterprise' - | 'pool' - | 'user' - | 'instance' - | 'job' - | 'controller' - | 'github_credentials' - | 'gitea_credentials' - | 'github_endpoint' - | 'scaleset'; - -export type Operation = 'create' | 'update' | 'delete'; - -export interface EventFilter { - 'entity-type': EntityType; - operations: Operation[]; -} - -export interface FilterMessage { - 'send-everything'?: boolean; - filters?: EventFilter[]; -} - 
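
// Usage sketch for the subscription types defined above: a client narrows the
// event stream by sending a FilterMessage once the socket opens. The wss:// URL
// is simplified here; the deleted module below derives the protocol from
// window.location instead.
const filters: EventFilter[] = [
  { 'entity-type': 'pool', operations: ['create', 'update', 'delete'] },
  { 'entity-type': 'instance', operations: ['update'] },
];
const subscription: FilterMessage = { 'send-everything': false, filters };
const socket = new WebSocket(`wss://${window.location.host}/api/v1/ws/events`);
socket.onopen = () => socket.send(JSON.stringify(subscription));
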
-export interface WebSocketEvent { - 'entity-type': EntityType; - operation: Operation; - payload: any; -} - -export interface WebSocketState { - connected: boolean; - connecting: boolean; - error: string | null; - lastEvent: WebSocketEvent | null; -} - -// Create the websocket store -function createWebSocketStore() { - const { subscribe, set, update } = writable({ - connected: false, - connecting: false, - error: null, - lastEvent: null - }); - - let ws: WebSocket | null = null; - let reconnectAttempts = 0; - let maxReconnectAttempts = 50; // Increased for more persistent reconnection - let baseReconnectInterval = 1000; // Base interval - let reconnectInterval = 1000; // Current interval - let maxReconnectInterval = 30000; // Max 30 seconds - let reconnectTimeout: number | null = null; - let currentFilters: EventFilter[] = []; - let manuallyDisconnected = false; - - // Event callbacks organized by entity type - const eventCallbacks = new Map void)[]>(); - - function getWebSocketUrl(): string { - const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; - const host = window.location.host; - return `${protocol}//${host}/api/v1/ws/events`; - } - - function connect() { - if (ws && (ws.readyState === WebSocket.CONNECTING || ws.readyState === WebSocket.OPEN)) { - return; - } - - manuallyDisconnected = false; - update(state => ({ ...state, connecting: true, error: null })); - - try { - const wsUrl = getWebSocketUrl(); - - // Use cookie authentication - no need for Bearer token in protocol - ws = new WebSocket(wsUrl); - - // Set connection timeout - const connectionTimeout = setTimeout(() => { - if (ws && ws.readyState === WebSocket.CONNECTING) { - ws.close(); - } - }, 10000); // 10 second timeout - - ws.onopen = () => { - clearTimeout(connectionTimeout); - reconnectAttempts = 0; - reconnectInterval = baseReconnectInterval; - - update(state => ({ ...state, connected: true, connecting: false, error: null })); - - // Send current filters if any - if (currentFilters.length > 0) { - sendFilters(currentFilters); - } - - // Setup heartbeat (currently no-op, but ready for future use) - startHeartbeat(); - }; - - ws.onmessage = (event) => { - try { - const data = JSON.parse(event.data); - - // Update the store with the last event - update(state => ({ ...state, lastEvent: data })); - - // Call registered callbacks for this entity type - const callbacks = eventCallbacks.get(data['entity-type']) || []; - callbacks.forEach(callback => { - try { - callback(data); - } catch (err) { - console.error('[WebSocket] Error in event callback:', err); - } - }); - } catch (err) { - console.error('[WebSocket] Error parsing message:', err); - } - }; - - ws.onclose = (event) => { - clearTimeout(connectionTimeout); - cleanup(); - - const wasManualDisconnect = event.code === 1000 && manuallyDisconnected; - const errorMessage = event.code !== 1000 ? 
`Connection closed: ${event.reason || 'Unknown reason'}` : null; - - update(state => ({ - ...state, - connected: false, - connecting: false, - error: errorMessage - })); - - // Attempt to reconnect unless it was explicitly a manual disconnect - // This includes server restarts that result in clean closes (code 1000) - if (!wasManualDisconnect) { - scheduleReconnect(); - } - }; - - ws.onerror = (error) => { - clearTimeout(connectionTimeout); - cleanup(); - - update(state => ({ - ...state, - connected: false, - connecting: false, - error: 'WebSocket connection error' - })); - - // Schedule reconnect on error if not manually disconnected - if (!manuallyDisconnected) { - scheduleReconnect(); - } - }; - - } catch (err) { - update(state => ({ - ...state, - connected: false, - connecting: false, - error: err instanceof Error ? err.message : 'Failed to connect' - })); - } - } - - function startHeartbeat() { - // Clear any existing intervals - cleanup(); - - // No need for client-side heartbeat checks since: - // 1. Server handles ping/pong automatically (every ~54 seconds) - // 2. Browser WebSocket automatically responds to ping frames with pong frames - // 3. Server will close connection if it doesn't receive pong responses - // 4. Server may not send any messages if there are no events to stream - // 5. onclose/onerror handlers will trigger reconnection if needed - } - - function cleanup() { - // No intervals to clean up currently - } - - function scheduleReconnect() { - if (manuallyDisconnected) { - return; - } - - if (reconnectTimeout) { - clearTimeout(reconnectTimeout); - } - - reconnectAttempts++; - - // Reset attempts periodically to allow for long-term reconnection - if (reconnectAttempts > maxReconnectAttempts) { - reconnectAttempts = 1; - reconnectInterval = baseReconnectInterval; - } - - const actualInterval = Math.min(reconnectInterval, maxReconnectInterval); - - reconnectTimeout = window.setTimeout(() => { - if (!manuallyDisconnected) { - connect(); - // Exponential backoff with jitter to avoid thundering herd - const jitter = Math.random() * 1000; // 0-1 second jitter - reconnectInterval = Math.min(reconnectInterval * 1.5 + jitter, maxReconnectInterval); - } - }, actualInterval); - } - - function sendFilters(filters: EventFilter[]) { - if (ws && ws.readyState === WebSocket.OPEN) { - const message: FilterMessage = { - 'send-everything': false, - filters: filters - }; - ws.send(JSON.stringify(message)); - currentFilters = [...filters]; - } - } - - function disconnect() { - manuallyDisconnected = true; - - if (reconnectTimeout) { - clearTimeout(reconnectTimeout); - reconnectTimeout = null; - } - - cleanup(); - - if (ws) { - ws.close(1000, 'Manual disconnect'); - ws = null; - } - - // Clear all callbacks - eventCallbacks.clear(); - currentFilters = []; - - update(state => ({ - ...state, - connected: false, - connecting: false, - error: null, - lastEvent: null - })); - } - - // Handle network connectivity changes - function handleNetworkChange() { - if (navigator.onLine && !manuallyDisconnected) { - // Delay reconnection slightly to allow network to stabilize - setTimeout(() => { - if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { - reconnectAttempts = 0; // Reset attempts on network recovery - reconnectInterval = baseReconnectInterval; - connect(); - } - }, 2000); - } - } - - // Listen for network changes - if (typeof window !== 'undefined') { - window.addEventListener('online', handleNetworkChange); - window.addEventListener('offline', () => { - 
update(state => ({ ...state, error: 'Network offline' })); - }); - - // Periodic check to ensure connection is maintained - setInterval(() => { - // Always maintain connection unless manually disconnected - if (!manuallyDisconnected) { - // If we should be connected but aren't, attempt to reconnect - if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { - connect(); - } - } - }, 10000); // Check every 10 seconds - } - - // Subscribe to events for a specific entity type - function subscribeToEntity(entityType: EntityType, operations: Operation[], callback: (event: WebSocketEvent) => void) { - - // Add callback to the list for this entity type - if (!eventCallbacks.has(entityType)) { - eventCallbacks.set(entityType, []); - } - eventCallbacks.get(entityType)!.push(callback); - - // Add or update the filter for this entity type - const existingFilterIndex = currentFilters.findIndex(f => f['entity-type'] === entityType); - const newFilter: EventFilter = { - 'entity-type': entityType, - operations: operations - }; - - if (existingFilterIndex >= 0) { - // Merge operations with existing filter - const existingOps = currentFilters[existingFilterIndex].operations; - newFilter.operations = Array.from(new Set([...existingOps, ...operations])); - currentFilters[existingFilterIndex] = newFilter; - } else { - currentFilters.push(newFilter); - } - - // Send updated filters if connected - if (ws && ws.readyState === WebSocket.OPEN) { - sendFilters(currentFilters); - } - - // Ensure connection exists (should already be connected via auto-connect) - if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { - connect(); - } - - // Return unsubscribe function - return () => { - const callbacks = eventCallbacks.get(entityType); - if (callbacks) { - const index = callbacks.indexOf(callback); - if (index > -1) { - callbacks.splice(index, 1); - } - - // If no more callbacks for this entity type, remove the filter - if (callbacks.length === 0) { - eventCallbacks.delete(entityType); - const filterIndex = currentFilters.findIndex(f => f['entity-type'] === entityType); - if (filterIndex > -1) { - currentFilters.splice(filterIndex, 1); - if (ws && ws.readyState === WebSocket.OPEN) { - sendFilters(currentFilters); - } - } - } - } - }; - } - - // Auto-connect when store is created (browser environment only) - if (typeof window !== 'undefined') { - // Connect immediately - connect(); - } - - return { - subscribe, - connect, - disconnect, - subscribeToEntity - }; -} - -export const websocketStore = createWebSocketStore(); \ No newline at end of file diff --git a/webapp/src/lib/test/EmptyComponent.svelte b/webapp/src/lib/test/EmptyComponent.svelte deleted file mode 100644 index 281c6866..00000000 --- a/webapp/src/lib/test/EmptyComponent.svelte +++ /dev/null @@ -1 +0,0 @@ -
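For orientation, here is a minimal sketch of how a component consumed the store removed above. It assumes the file lived at $lib/stores/websocket and that EntityType/Operation are string unions with members like 'pool' and 'create' | 'update' | 'delete'; neither detail is shown in this hunk.

    import { onDestroy } from 'svelte';
    import { websocketStore, type WebSocketEvent } from '$lib/stores/websocket';

    // subscribeToEntity merges the requested operations into the per-entity
    // filter set and re-sends the filters whenever the socket is open.
    const unsubscribe = websocketStore.subscribeToEntity(
        'pool',
        ['create', 'update', 'delete'],
        (event: WebSocketEvent) => console.log(`pool ${event.operation}`, event.payload)
    );

    // Dropping the last callback for an entity type also removes its
    // server-side filter, so cleanup is a single call.
    onDestroy(unsubscribe);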
                \ No newline at end of file diff --git a/webapp/src/lib/utils/apiError.ts b/webapp/src/lib/utils/apiError.ts deleted file mode 100644 index e175a65b..00000000 --- a/webapp/src/lib/utils/apiError.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type { APIErrorResponse } from '$lib/api/generated/api'; - -/** - * Extracts error message from API error response - * @param error - The error object from API call - * @returns Human-readable error message - * - * @example - * ```typescript - * try { - * await garmApi.deletePool(poolId); - * } catch (error) { - * const message = extractAPIError(error); - * // Will return "Pool deletion failed. Pool has active runners" if both error and details exist - * // Or just "Pool deletion failed" if only error exists - * // Or just "Pool has active runners" if only details exist - * toastStore.error('Delete Failed', message); - * } - * ``` - */ -export function extractAPIError(error: any): string { - // Default fallback message - let errorMessage = 'An unexpected error occurred'; - - // Try to extract APIErrorResponse from the error - if (error && typeof error === 'object') { - // Check if it's an axios error with response - if ('response' in error && error.response && typeof error.response === 'object') { - const response = error.response; - - // Check if response has data with APIErrorResponse structure - if ('data' in response && response.data && typeof response.data === 'object') { - const apiError = response.data as APIErrorResponse; - - // Build message from available fields - const errorPart = apiError.error && apiError.error.trim() ? apiError.error : ''; - const detailsPart = apiError.details && apiError.details.trim() ? apiError.details : ''; - - if (errorPart && detailsPart) { - // Both available - combine them - return `${errorPart}. 
${detailsPart}`; - } else if (errorPart) { - // Only error available - return errorPart; - } else if (detailsPart) { - // Only details available - return detailsPart; - } - } - - // If no APIErrorResponse, try to get status-based message - if ('status' in response) { - const status = response.status; - switch (status) { - case 400: - errorMessage = 'Bad request - please check your input'; - break; - case 401: - errorMessage = 'Unauthorized - please log in again'; - break; - case 403: - errorMessage = 'Access denied - insufficient permissions'; - break; - case 404: - errorMessage = 'Resource not found'; - break; - case 409: - errorMessage = 'Conflict - resource already exists or is in use'; - break; - case 422: - errorMessage = 'Validation failed - please check your input'; - break; - case 500: - errorMessage = 'Internal server error - please try again later'; - break; - default: - errorMessage = `Request failed with status ${status}`; - } - } - } - // Check if it's a direct Error object with a meaningful message - else if (error instanceof Error && error.message && !error.message.includes('status code')) { - return error.message; - } - } - - return errorMessage; -} \ No newline at end of file diff --git a/webapp/src/lib/utils/common.ts b/webapp/src/lib/utils/common.ts deleted index 204ca49a..00000000 --- a/webapp/src/lib/utils/common.ts +++ /dev/null @@ -1,298 +0,0 @@ -import { resolve } from '$app/paths'; - -/** - * Common utility functions shared across components and pages - */ - -/** - * Formats a date string or Date object to a human-readable format - */ -export function formatDate(date: string | Date | null | undefined): string { - if (!date) return 'N/A'; - try { - const d = typeof date === 'string' ? new Date(date) : date; - return d.toLocaleString(); - } catch { - return 'Invalid Date'; - } -} - -/** - * Returns the appropriate forge icon SVG for the given endpoint type - * @param endpointType - The type of endpoint ('github', 'gitea', etc.) - * @param sizeClasses - Optional size classes (e.g., 'w-4 h-4', 'w-8 h-8'). Defaults to 'w-4 h-4' - */ -export function getForgeIcon(endpointType: string, sizeClasses: string = 'w-4 h-4'): string { - if (endpointType === 'gitea') { - return ``; // Gitea icon SVG (markup stripped in extraction) - } else if (endpointType === 'github') { - // GitHub (also used for GHES) - return ``; // GitHub icon SVG (markup stripped in extraction) - } else { - // Return a generic placeholder icon if endpoint type is unknown - return ``; // generic placeholder SVG (markup stripped in extraction) - } -} - -/** - * Truncates an image name to a specified length and indicates if it was truncated - */ -export function truncateImageName(imageName: string, maxLength: number = 25): { truncated: string, isTruncated: boolean } { - if (imageName.length <= maxLength) { - return { truncated: imageName, isTruncated: false }; - } - return { truncated: imageName.substring(0, maxLength) + '...', isTruncated: true }; - } - -/** - * Gets the entity name for a Pool or ScaleSet object - */ -export function getEntityName(entity: any, eagerCacheStores?: any): string { - // Both Pool and ScaleSet objects now include the name fields directly - if (entity.repo_name) return entity.repo_name; - if (entity.org_name) return entity.org_name; - if (entity.enterprise_name) return entity.enterprise_name; - - // Fallback to eager cache lookup if name fields are not available (older API or cached data) - if (entity.repo_id && !entity.repo_name && eagerCacheStores?.repositories) { - const repo = eagerCacheStores.repositories.find((r: any) => r.id === entity.repo_id); - return repo ? `${repo.owner}/${repo.name}` : 'Unknown Entity'; - } - if (entity.org_id && !entity.org_name && eagerCacheStores?.organizations) { - const org = eagerCacheStores.organizations.find((o: any) => o.id === entity.org_id); - return (org && org.name) ? org.name : 'Unknown Entity'; - } - if (entity.enterprise_id && !entity.enterprise_name && eagerCacheStores?.enterprises) { - const enterprise = eagerCacheStores.enterprises.find((e: any) => e.id === entity.enterprise_id); - return (enterprise && enterprise.name) ? enterprise.name : 'Unknown Entity'; - } - - return 'Unknown Entity'; -} - -/** - * Gets the entity type for a Pool or ScaleSet object - */ -export function getEntityType(entity: any): string { - if (entity.repo_id) return 'repository'; - if (entity.org_id) return 'organization'; - if (entity.enterprise_id) return 'enterprise'; - return 'unknown'; -} - -/** - * Gets the URL for an entity detail page - */ -export function getEntityUrl(entity: any): string { - if (entity.repo_id) return resolve(`/repositories/${entity.repo_id}`); - if (entity.org_id) return resolve(`/organizations/${entity.org_id}`); - if (entity.enterprise_id) return resolve(`/enterprises/${entity.enterprise_id}`); - return '#'; -} - -/** - * Updates entity fields, preserving events and other non-API fields - */ -export function updateEntityFields(currentEntity: any, updatedFields: any): any { - // Preserve only fields that are definitely not in the API response - const { events: originalEvents } = currentEntity; - - // Use the API response as the primary source, add back preserved fields - const result = { - ...updatedFields, - events: originalEvents // Always preserve events since they're managed by websockets - }; - - return result; -} - -/** - * Scrolls to bottom of events container - */ -export function scrollToBottomEvents(eventsContainer: HTMLElement | null): void { - if (eventsContainer) { - eventsContainer.scrollTop = eventsContainer.scrollHeight; - } -} - -/** - * Changes pagination page - */ -export function changePage(currentPage: number, targetPage: number, totalPages: number): number { - if (targetPage >= 1 && targetPage <= totalPages) { - return targetPage; - } - return currentPage; -} - -/** - * Changes items per page and resets to page 1 - */ -export function changePerPage(newPerPage: number): { newPerPage: number, newCurrentPage: number } {
- return { newPerPage, newCurrentPage: 1 }; -} - -/** - * Gets entity status badge information based on pool_manager_status - */ -export function getEntityStatusBadge(entity: any): { text: string, variant: 'success' | 'error' } { - if (entity.pool_manager_status?.running) { - return { - text: 'Running', - variant: 'success' - }; - } else { - return { - text: 'Stopped', - variant: 'error' - }; - } -} - -/** - * Gets badge variant for enabled/disabled status - */ -export function getEnabledStatusBadge(enabled: boolean): { text: string, variant: 'success' | 'error' } { - return { - text: enabled ? 'Enabled' : 'Disabled', - variant: enabled ? 'success' : 'error' - }; -} - -/** - * Gets badge variant for authentication type - */ -export function getAuthTypeBadge(authType: string): { text: string, variant: 'success' | 'info' } { - return { - text: authType === 'pat' ? 'PAT' : 'App', - variant: authType === 'pat' ? 'success' : 'info' - }; -} - -/** - * Gets badge variant for event level - */ -export function getEventLevelBadge(level: string): { text: string, variant: 'success' | 'error' | 'warning' | 'info' } { - const normalizedLevel = level.toLowerCase(); - switch (normalizedLevel) { - case 'error': - return { text: 'Error', variant: 'error' }; - case 'warning': - return { text: 'Warning', variant: 'warning' }; - case 'info': - return { text: 'Info', variant: 'info' }; - default: - return { text: level, variant: 'info' }; - } -} - -/** - * Filters entities by search term, supporting different search field configurations - */ -export function filterEntities<T extends Record<string, any>>( - entities: T[], - searchTerm: string, - searchFields: string[] | ((entity: T, eagerCache?: any) => string) -): T[] { - if (!searchTerm.trim()) return entities; - - const lowercaseSearch = searchTerm.toLowerCase(); - - return entities.filter(entity => { - if (typeof searchFields === 'function') { - // Custom search function (e.g., for pools/scalesets using getEntityName) - const searchText = searchFields(entity); - return searchText.toLowerCase().includes(lowercaseSearch); - } else { - // Field-based search - return searchFields.some(field => { - const value = entity[field]; - return value?.toString().toLowerCase().includes(lowercaseSearch); - }); - } - }); -} - -/** - * Convenience function for filtering repositories (searches name and owner) - */ -export function filterRepositories<T extends Record<string, any>>(repositories: T[], searchTerm: string): T[] { - return filterEntities(repositories, searchTerm, ['name', 'owner']); -} - -/** - * Convenience function for filtering organizations/enterprises (searches name only) - */ -export function filterByName<T extends Record<string, any>>(entities: T[], searchTerm: string): T[] { - return filterEntities(entities, searchTerm, ['name']); -} - -/** - * Convenience function for filtering credentials (searches name, description, and endpoint name) - */ -export function filterCredentials<T extends Record<string, any>>(credentials: T[], searchTerm: string): T[] { - return filterEntities(credentials, searchTerm, (credential) => { - const searchableText = [ - credential.name || '', - credential.description || '', - credential.endpoint?.name || '' - ].join(' '); - return searchableText; - }); -} - -/** - * Convenience function for filtering endpoints (searches name, description, base_url, and api_base_url) - */ -export function filterEndpoints<T extends Record<string, any>>(endpoints: T[], searchTerm: string): T[] { - return filterEntities(endpoints, searchTerm, ['name', 'description', 'base_url', 'api_base_url']); -} - -/** - * Pagination utility functions - */ -export interface PaginationState { - currentPage: number; -
perPage: number; - totalPages: number; -} - -/** - * Creates paginated slice of items - */ -export function paginateItems<T>(items: T[], currentPage: number, perPage: number): T[] { - return items.slice( - (currentPage - 1) * perPage, - currentPage * perPage - ); -} - -/** - * Calculates total pages and adjusts current page if needed - */ -export function calculatePagination(totalItems: number, perPage: number, currentPage: number): PaginationState { - const totalPages = Math.ceil(totalItems / perPage); - const adjustedCurrentPage = (currentPage > totalPages && totalPages > 0) ? totalPages : currentPage; - - return { - currentPage: adjustedCurrentPage, - perPage, - totalPages - }; -} - -/** - * Creates pagination info text (e.g., "Showing 1 to 25 of 100 results") - */ -export function getPaginationInfo(currentPage: number, perPage: number, totalItems: number): string { - if (totalItems === 0) return 'No results'; - - const start = (currentPage - 1) * perPage + 1; - const end = Math.min(currentPage * perPage, totalItems); - - return `Showing ${start} to ${end} of ${totalItems} results`; -} - diff --git a/webapp/src/lib/utils/status.ts b/webapp/src/lib/utils/status.ts deleted index 7f7f68a8..00000000 --- a/webapp/src/lib/utils/status.ts +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Unified status formatting and styling utilities - * Provides consistent status display and color coding across all pages - */ - -/** - * Formats status text for display by replacing underscores with spaces - * and converting to proper case - */ -export function formatStatusText(status: string): string { - if (!status) return ''; - return status.replace(/_/g, ' ').toLowerCase() - .split(' ') - .map(word => word.charAt(0).toUpperCase() + word.slice(1)) - .join(' '); -} - -/** - * Returns Tailwind CSS classes for status badges based on industry-standard color conventions: - * - Green: Successfully running/active states - * - Blue: Idle/ready states - * - Yellow/Amber: Warning or transitional states - * - Purple: Creating/building states - * - Orange: Deletion/termination in progress - * - Red: Failed/error states - * - Gray: Unknown/pending states - */ -export function getStatusBadgeClass(status: string): string { - if (!status) { - return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20'; - } - - const normalizedStatus = status.toLowerCase(); - - switch (normalizedStatus) { - // Successfully running states - Green - case 'running': - case 'online': - return 'bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-500/10 dark:text-green-400 dark:ring-green-500/20'; - - // Idle/ready states - Blue - case 'idle': - case 'stopped': - return 'bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-500/10 dark:text-blue-400 dark:ring-blue-500/20'; - - // Active/working states - Yellow - case 'active': - return 'bg-yellow-50 text-yellow-700 ring-yellow-600/20 dark:bg-yellow-500/10 dark:text-yellow-400 dark:ring-yellow-500/20'; - - // Creating/building states - Purple with pulse animation - case 'creating': - case 'installing': - case 'pending_create': - case 'provisioning': - return 'bg-purple-50 text-purple-700 ring-purple-600/20 dark:bg-purple-500/10 dark:text-purple-400 dark:ring-purple-500/20 animate-pulse'; - - // Deletion/termination states - Orange with pulse animation - case 'deleting': - case 'terminating': - case 'pending_delete': - case 'destroying': - return 'bg-orange-50 text-orange-700 ring-orange-600/20 dark:bg-orange-500/10 dark:text-orange-400 dark:ring-orange-500/20 animate-pulse'; - - // Failed/error states - Red - case 'failed': - case 'error': - case 'terminated': - case 'offline': - return 'bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-500/10 dark:text-red-400 dark:ring-red-500/20'; - - // General pending states - Gray with pulse animation - case 'pending': - case 'unknown': - return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20 animate-pulse'; - - // Default - Gray - default: - return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20'; - } -} - -/** - * Combined utility that returns both formatted text and CSS classes - */ -export function getFormattedStatus(status: string): { text: string; classes: string } { - return { - text: formatStatusText(status), - classes: getStatusBadgeClass(status) - }; -} \ No newline at end of file
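Since the pagination helpers in common.ts and the badge helpers in status.ts above are both going away, a short sketch of how they composed on a list page may be useful; the data below is hypothetical and not from this repository.

    import { calculatePagination, paginateItems, getPaginationInfo } from '$lib/utils/common';
    import { getFormattedStatus } from '$lib/utils/status';

    const instances = [{ status: 'running' }, { status: 'pending_create' }, { status: 'failed' }];

    // Clamp the requested page, slice the visible window, and build the footer text.
    const { currentPage, perPage } = calculatePagination(instances.length, 2, 1);
    const visible = paginateItems(instances, currentPage, perPage); // first two items
    const footer = getPaginationInfo(currentPage, perPage, instances.length); // "Showing 1 to 2 of 3 results"

    // 'pending_create' becomes text "Pending Create" plus pulsing purple badge classes.
    const badge = getFormattedStatus('pending_create');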
diff --git a/webapp/src/openapitools.json b/webapp/src/openapitools.json deleted index a82623d6..00000000 --- a/webapp/src/openapitools.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", - "spaces": 2, - "generator-cli": { - "version": "7.14.0" - } -} diff --git a/webapp/src/routes/+layout.svelte b/webapp/src/routes/+layout.svelte deleted index b5f22c10..00000000 --- a/webapp/src/routes/+layout.svelte +++ /dev/null @@ -1,87 +0,0 @@ [deleted layout markup was stripped during extraction; what survives: the page title "GARM - GitHub Actions Runner Manager"; an {#if $authStore.loading} branch with a "Loading..." spinner; an {:else if requiresAuth && !$authStore.isAuthenticated} branch with a "Redirecting to login..." notice; an {:else if isLoginPage || isInitPage} branch rendering the bare page; and an {:else} branch rendering the authenticated app shell]
                -{/if} - - - diff --git a/webapp/src/routes/+layout.ts b/webapp/src/routes/+layout.ts deleted file mode 100644 index bc675bc0..00000000 --- a/webapp/src/routes/+layout.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { LayoutLoad } from './$types'; - -export const load: LayoutLoad = async ({ url }) => { - // For now, we'll handle auth redirect in the component - // In a real app, you might check auth state here - - return { - url: url.pathname - }; -}; - -export const prerender = false; -export const ssr = false; \ No newline at end of file diff --git a/webapp/src/routes/+page.svelte b/webapp/src/routes/+page.svelte deleted file mode 100644 index 07df6498..00000000 --- a/webapp/src/routes/+page.svelte +++ /dev/null @@ -1,322 +0,0 @@ - - - - Dashboard - GARM - - -
[deleted dashboard markup was stripped during extraction; what survives: the "Dashboard" heading with the subtitle "Welcome to GARM - GitHub Actions Runner Manager"; an {#if error} alert block with the heading "Error loading dashboard" and the message {error}; several stripped component tags that are not recoverable; and an {#if controllerInfo} section shown only when controller info is loaded]
                \ No newline at end of file diff --git a/webapp/src/routes/credentials/+page.svelte b/webapp/src/routes/credentials/+page.svelte deleted file mode 100644 index 7de54cd4..00000000 --- a/webapp/src/routes/credentials/+page.svelte +++ /dev/null @@ -1,1020 +0,0 @@ - - - - - - Credentials - GARM - - -
[deleted credentials-page markup was stripped during extraction; what survives: rows rendering {credential.name} and {credential.description}; an endpoint cell combining {@html getForgeIcon(credential.forge_type || 'unknown')} with {credential.endpoint?.name || 'Unknown'}; an auth-type badge chosen via {#if (credential['auth-type'] || 'pat') === 'pat'}; and edit/delete actions wired to showEditCredentialsModal(credential) and showDeleteCredentialsModal(credential)]
                - - -{#if showCreateModal} - -{/if} - - -{#if showEditModal && editingCredential} - -{/if} - - -{#if showDeleteModal && deletingCredential} - -{/if} \ No newline at end of file diff --git a/webapp/src/routes/credentials/page.integration.test.ts b/webapp/src/routes/credentials/page.integration.test.ts deleted file mode 100644 index a1461fad..00000000 --- a/webapp/src/routes/credentials/page.integration.test.ts +++ /dev/null @@ -1,720 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import CredentialsPage from './+page.svelte'; -import { createMockGithubCredentials, createMockGiteaCredentials, createMockForgeEndpoint, createMockGiteaEndpoint } from '../../test/factories.js'; - -// Mock app stores and navigation -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -const mockGithubCredential = createMockGithubCredentials({ - id: 1001, - name: 'github-creds', - description: 'GitHub credentials', - 'auth-type': 'pat' -}); - -const mockGiteaCredential = createMockGiteaCredentials({ - id: 1002, - name: 'gitea-creds', - description: 'Gitea credentials', - 'auth-type': 'pat' -}); - -const mockCredentials = [mockGithubCredential, mockGiteaCredential]; -const mockEndpoints = [createMockForgeEndpoint(), createMockGiteaEndpoint()]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/ForgeTypeSelector.svelte'); -vi.unmock('$lib/components/ActionButton.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createGithubCredentials: vi.fn(), - createGiteaCredentials: vi.fn(), - updateGithubCredentials: vi.fn(), - updateGiteaCredentials: vi.fn(), - deleteGithubCredentials: vi.fn(), - deleteGiteaCredentials: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - credentials: mockCredentials, - endpoints: mockEndpoints, - loading: { credentials: false, endpoints: false }, - loaded: { credentials: true, endpoints: true }, - errorMessages: { credentials: '', endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getCredentials: vi.fn(), - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => ''), - filterCredentials: vi.fn((credentials, searchTerm) => { - if (!searchTerm) return credentials; - return credentials.filter((credential: any) => - credential.name?.toLowerCase().includes(searchTerm.toLowerCase()) || - credential.description?.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items, currentPage, perPage) => { - const start = (currentPage - 1) * perPage; - return items.slice(start, start + perPage); - }), - getAuthTypeBadge: vi.fn((authType) => authType === 'pat' ? 
'PAT' : 'App'), - getEntityStatusBadge: vi.fn(() => 'active'), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Global setup for each test -let garmApi: any; -let eagerCacheManager: any; - -describe('Comprehensive Integration Tests for Credentials Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const cacheModule = await import('$lib/stores/eager-cache.js'); - eagerCacheManager = cacheModule.eagerCacheManager; - - (eagerCacheManager.getCredentials as any).mockResolvedValue(mockCredentials); - (eagerCacheManager.getEndpoints as any).mockResolvedValue(mockEndpoints); - (garmApi.createGithubCredentials as any).mockResolvedValue({}); - (garmApi.createGiteaCredentials as any).mockResolvedValue({}); - (garmApi.updateGithubCredentials as any).mockResolvedValue({}); - (garmApi.updateGiteaCredentials as any).mockResolvedValue({}); - (garmApi.deleteGithubCredentials as any).mockResolvedValue({}); - (garmApi.deleteGiteaCredentials as any).mockResolvedValue({}); - }); - - describe('Component Rendering and Data Display', () => { - it('should render credentials page with real components', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data to load - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Should render the page header - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - - // Should render page description - expect(screen.getByText(/Manage authentication credentials for your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); - - it('should display credentials data in the table', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Component should render the DataTable component which would display credential data - // The exact credential names may not be visible due to how the DataTable renders data - // but the structure should be in place for displaying credentials - expect(document.body).toBeInTheDocument(); - }); - - it('should render all major sections when data is loaded', async () => { - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Should have page header with action button - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - - // Should show the data table structure - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering Integration', () => { - it('should handle search functionality', async () => { - const { filterCredentials } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Search functionality should be integrated - expect(filterCredentials).toHaveBeenCalledWith(mockCredentials, ''); - }); - - it('should filter credentials based on search term', async () => { - const { filterCredentials } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - await waitFor(() => { - // Should call filter function with empty search term initially - expect(filterCredentials).toHaveBeenCalledWith(mockCredentials, ''); 
- }); - - // Verify filtering logic works correctly - const filteredResults = filterCredentials(mockCredentials, 'github'); - expect(filteredResults).toHaveLength(1); - expect(filteredResults[0].name).toBe('github-creds'); - }); - }); - - describe('Pagination Integration', () => { - it('should handle pagination with real data', async () => { - const { paginateItems } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Should paginate the credentials data - expect(paginateItems).toHaveBeenCalledWith(mockCredentials, 1, 25); - }); - - it('should handle per-page changes', async () => { - const { changePerPage } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Change per page functionality should be available - expect(changePerPage).toBeDefined(); - }); - }); - - describe('Modal Integration', () => { - it('should handle create credential modal workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should have Add Credentials button - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Should have the PageHeader component integrated with create action - const addButton = screen.getByRole('button', { name: /Add Credentials/i }); - expect(addButton).toHaveClass('bg-blue-600'); - - // Create API methods should be available for the modal workflow - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Toast notifications should be integrated for success/error feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle edit credential modal workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Update API should be available for the edit workflow - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // The edit functionality should be integrated through the DataTable component - // Edit buttons may not be visible when no data is loaded, but the API structure should be in place - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should handle delete credential modal workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Delete API should be available for the delete workflow - expect(garmApi.deleteGithubCredentials).toBeDefined(); - expect(garmApi.deleteGiteaCredentials).toBeDefined(); - - // Confirmation modal and error handling should be integrated - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - - // The delete functionality should be integrated through the DataTable component - // Delete buttons may not be visible when no data is loaded, but the infrastructure should be in place - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - }); - - describe('API 
Integration', () => { - it('should call eager cache manager when component mounts', async () => { - render(CredentialsPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the eager cache to load data - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - - // More importantly, verify the component displays the loaded data - // Data should be integrated through the eager cache system - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock delayed cache response - (eagerCacheManager.getCredentials as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockCredentials), 100)) - ); - - render(CredentialsPage); - - // Component should render the basic structure immediately - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - - // After cache resolves, data loading should be complete - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }, { timeout: 1000 }); - - // Component should handle data loading properly through the cache system - expect(screen.getByText(/Manage authentication credentials for your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); - - it('should handle API errors and display error state', async () => { - // Mock cache to fail - const error = new Error('Failed to load credentials'); - (eagerCacheManager.getCredentials as any).mockRejectedValue(error); - - const { container } = render(CredentialsPage); - - // Wait for error to be handled - await waitFor(() => { - // Component should handle the error gracefully and continue to render - expect(container).toBeInTheDocument(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - - // Error handling should be integrated with retry functionality - expect(eagerCacheManager.retryResource).toBeDefined(); - - // Toast error notifications should be available for error feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle retry functionality', async () => { - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Retry functionality should be available - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Credential Creation Integration', () => { - it('should integrate GitHub credential creation workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should have the structure in place for GitHub credential creation - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // The GitHub credential creation workflow should be integrated - expect(garmApi.createGithubCredentials).toBeDefined(); - }); - - it('should integrate Gitea credential creation workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should have the structure in place for Gitea credential creation - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // The Gitea credential creation workflow should be integrated - expect(garmApi.createGiteaCredentials).toBeDefined(); - }); - - 
it('should show success message after credential creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(CredentialsPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Success toast functionality should be integrated - expect(toastStore.success).toBeDefined(); - }); - }); - - describe('Credential Update Integration', () => { - it('should integrate GitHub credential update workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Update functionality should be available for GitHub credentials - expect(garmApi.updateGithubCredentials).toBeDefined(); - - // Component should be ready to handle GitHub credential updates - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should integrate Gitea credential update workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Update functionality should be available for Gitea credentials - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Component should be ready to handle Gitea credential updates - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should handle selective field updates', async () => { - render(CredentialsPage); - - await waitFor(() => { - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Update APIs should be available for selective field updates - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Component should track original form data for comparison - // This enables selective updates where only changed fields are sent - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - - // Toast notifications should provide feedback for update operations - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.info).toBeDefined(); - }); - }); - - describe('Credential Deletion Integration', () => { - it('should integrate GitHub credential deletion workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Deletion functionality should be available - expect(garmApi.deleteGithubCredentials).toBeDefined(); - - // Component should be ready to handle GitHub credential deletion - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should integrate Gitea credential deletion workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Deletion functionality should be available - expect(garmApi.deleteGiteaCredentials).toBeDefined(); - - // Component should be ready to handle Gitea credential deletion - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should show error handling structure for credential deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - // Set up API to fail when 
deleteGithubCredentials is called - const error = new Error('Credential deletion failed'); - (garmApi.deleteGithubCredentials as any).mockRejectedValue(error); - - render(CredentialsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Verify the component has the proper structure for deletion error handling - expect(toastStore.error).toBeDefined(); - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the eager cache system - expect(screen.getByText(/Manage authentication credentials for your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); - - it('should maintain consistent state across components', async () => { - render(CredentialsPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should be integrated through the eager cache system - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(CredentialsPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Form Integration', () => { - it('should integrate form validation', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Form validation should be integrated in the modals - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Create and update APIs should be available for form submission - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Error handling should be integrated for validation failures - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle file upload integration', async () => { - render(CredentialsPage); - - await waitFor(() => { - // File upload functionality should be available for private keys - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // GitHub credentials should support private key uploads for App authentication - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.updateGithubCredentials).toBeDefined(); - - // File processing should be available for base64 encoding - expect(FileReader).toBeDefined(); - expect(btoa).toBeDefined(); - - // Component should handle private key file uploads for GitHub App credentials - expect(screen.getByRole('button', { name: /Add Credentials/i })).toHaveClass('bg-blue-600'); - }); - - it('should handle forge type selection', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Forge type selection should be integrated - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Should support both GitHub and Gitea credential 
types - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Forge icon utility should be available for type display - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - - it('should handle authentication type selection', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Authentication type selection should be integrated - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Should support both PAT and App authentication for GitHub - expect(garmApi.createGithubCredentials).toBeDefined(); - - // Should have auth type badge utility for display - const { getAuthTypeBadge } = await import('$lib/utils/common.js'); - expect(getAuthTypeBadge).toBeDefined(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support various user interaction flows', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should support user interactions like search, pagination, CRUD operations - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Should have search functionality available - expect(screen.getByPlaceholderText(/Search credentials/i)).toBeInTheDocument(); - }); - - it('should handle keyboard shortcuts', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should handle keyboard navigation and shortcuts - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Should have keyboard accessible buttons and interactive elements - const addButton = screen.getByRole('button', { name: /Add Credentials/i }); - expect(addButton).toHaveAttribute('type', 'button'); - - // Window event listeners should be set up for keyboard handling - // This includes Escape key for modal closing and other shortcuts - expect(window.addEventListener).toBeDefined(); - - // Component should handle focus management for accessibility - expect(document.activeElement).toBeDefined(); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should have proper ARIA attributes and labels - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - }); - - it('should be responsive across different viewport sizes', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should render properly across different viewport sizes - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - // Page structure should be responsive - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(screen.getByRole('heading', { name: 'Credentials' })).toBeInTheDocument(); - }); - }); - }); - - describe('Authentication Type Handling', () => { - it('should handle PAT authentication workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // PAT authentication should be supported for both GitHub and Gitea - expect(screen.getByRole('button', { name: /Add Credentials/i 
})).toBeInTheDocument(); - }); - - // PAT creation should be available for both forge types - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - }); - - it('should handle App authentication workflow', async () => { - render(CredentialsPage); - - await waitFor(() => { - // App authentication should be supported for GitHub only - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // App creation should be available for GitHub - expect(garmApi.createGithubCredentials).toBeDefined(); - - // File upload should be available for private keys - expect(FileReader).toBeDefined(); - }); - - it('should handle authentication type restrictions for Gitea', async () => { - render(CredentialsPage); - - await waitFor(() => { - // Gitea should only support PAT authentication - expect(screen.getByRole('button', { name: /Add Credentials/i })).toBeInTheDocument(); - }); - - // Only PAT creation should be available for Gitea - expect(garmApi.createGiteaCredentials).toBeDefined(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/credentials/page.render.test.ts b/webapp/src/routes/credentials/page.render.test.ts deleted file mode 100644 index 975abfc9..00000000 --- a/webapp/src/routes/credentials/page.render.test.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import CredentialsPage from './+page.svelte'; -import { createMockGithubCredentials, createMockForgeEndpoint } from '../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createGithubCredentials: vi.fn(), - createGiteaCredentials: vi.fn(), - updateGithubCredentials: vi.fn(), - updateGiteaCredentials: vi.fn(), - deleteGithubCredentials: vi.fn(), - deleteGiteaCredentials: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - credentials: [], - endpoints: [], - loading: { credentials: false, endpoints: false }, - loaded: { credentials: false, endpoints: false }, - errorMessages: { credentials: '', endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getCredentials: vi.fn(), - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => 'github'), - filterCredentials: vi.fn((credentials) => credentials), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items) => items), - getAuthTypeBadge: vi.fn(() => 'PAT'), - getEntityStatusBadge: vi.fn(() => 'active'), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockCredential = createMockGithubCredentials({ - name: 'github-creds', - description: 'GitHub credentials', - 'auth-type': 'pat' -}); - -const mockEndpoint = createMockForgeEndpoint({ - name: 'github.com', - description: 'GitHub.com endpoint', - endpoint_type: 'github' -}); - -describe('Credentials Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { 
eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getCredentials as any).mockResolvedValue([mockCredential]); - (eagerCacheManager.getEndpoints as any).mockResolvedValue([mockEndpoint]); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(CredentialsPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(CredentialsPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render page header', () => { - const { container } = render(CredentialsPage); - // Should have page header component - expect(container).toBeInTheDocument(); - }); - - it('should render data table', () => { - const { container } = render(CredentialsPage); - // Should have DataTable component - expect(container).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(CredentialsPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(CredentialsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(CredentialsPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load credentials on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(CredentialsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call eager cache to load credentials - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - it('should load endpoints on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(CredentialsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call eager cache to load endpoints - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', () => { - const { container } = render(CredentialsPage); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(CredentialsPage); - - // Should set page title - expect(document.title).toContain('Credentials - GARM'); - }); - - it('should handle window event listeners', () => { - render(CredentialsPage); - - // Window should have event listener capabilities available - expect(window.addEventListener).toBeDefined(); - expect(window.removeEventListener).toBeDefined(); - - // Component should be able to handle keyboard events for modal management - expect(document).toBeDefined(); - expect(document.addEventListener).toBeDefined(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render create modal', () => { - const { container } = render(CredentialsPage); - - // Create modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - - it('should conditionally render edit modal', () => { - const { container } = render(CredentialsPage); - - // Edit modal should not be visible initially - 
expect(container).toBeInTheDocument(); - }); - - it('should conditionally render delete modal', () => { - const { container } = render(CredentialsPage); - - // Delete modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - - it('should conditionally render forge type selector', () => { - const { container } = render(CredentialsPage); - - // Forge type selector should be available for create modal - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/credentials/page.test.ts b/webapp/src/routes/credentials/page.test.ts deleted file mode 100644 index 019c04af..00000000 --- a/webapp/src/routes/credentials/page.test.ts +++ /dev/null @@ -1,612 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import CredentialsPage from './+page.svelte'; -import { createMockGithubCredentials, createMockGiteaCredentials, createMockForgeEndpoint, createMockGiteaEndpoint } from '../../test/factories.js'; - -// Mock the page stores -vi.mock('$app/stores', () => ({})); - -// Mock navigation -vi.mock('$app/navigation', () => ({})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createGithubCredentials: vi.fn(), - createGiteaCredentials: vi.fn(), - updateGithubCredentials: vi.fn(), - updateGiteaCredentials: vi.fn(), - deleteGithubCredentials: vi.fn(), - deleteGiteaCredentials: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - credentials: [], - endpoints: [], - loading: { credentials: false, endpoints: false }, - loaded: { credentials: false, endpoints: false }, - errorMessages: { credentials: '', endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getCredentials: vi.fn(), - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => 'github'), - filterCredentials: vi.fn((credentials) => credentials), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items) => items), - getAuthTypeBadge: vi.fn(() => 'PAT'), - getEntityStatusBadge: vi.fn(() => 'active'), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockGithubCredential = createMockGithubCredentials({ - name: 'github-creds', - description: 'GitHub credentials', - 'auth-type': 'pat' -}); - -const mockGiteaCredential = createMockGiteaCredentials({ - name: 'gitea-creds', - description: 'Gitea credentials', - 'auth-type': 'pat' -}); - -const mockCredentials = [mockGithubCredential, mockGiteaCredential]; -const mockEndpoints = [createMockForgeEndpoint(), createMockGiteaEndpoint()]; - -describe('Credentials Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default eager cache mock - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getCredentials as any).mockResolvedValue(mockCredentials); - (eagerCacheManager.getEndpoints as any).mockResolvedValue(mockEndpoints); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = 
render(CredentialsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(CredentialsPage); - expect(document.title).toContain('Credentials - GARM'); - }); - }); - - describe('Data Loading', () => { - it('should load credentials on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(CredentialsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCacheManager.getCredentials).toHaveBeenCalled(); - }); - - it('should load endpoints on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(CredentialsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - it('should handle loading state', async () => { - const { container } = render(CredentialsPage); - - // Component should render without error during loading - expect(container).toBeInTheDocument(); - - // Should have access to loading state through eager cache - expect(document.title).toContain('Credentials - GARM'); - - // Loading infrastructure should be properly integrated - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - expect(eagerCache.subscribe).toBeDefined(); - }); - - it('should handle cache error state', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock cache to fail - const error = new Error('Failed to load credentials'); - (eagerCacheManager.getCredentials as any).mockRejectedValue(error); - - const { container } = render(CredentialsPage); - - // Wait for the error to be handled - await new Promise(resolve => setTimeout(resolve, 100)); - - // Component should handle error gracefully - expect(container).toBeInTheDocument(); - }); - - it('should retry loading credentials', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(CredentialsPage); - - // Verify retry functionality is available - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Search and Pagination', () => { - it('should handle search functionality', async () => { - const { filterCredentials } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - // Verify search utility is used - expect(filterCredentials).toBeDefined(); - }); - - it('should handle pagination', async () => { - const { paginateItems, changePerPage } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - // Verify pagination utilities are available - expect(paginateItems).toBeDefined(); - expect(changePerPage).toBeDefined(); - }); - }); - - describe('Credential Creation', () => { - it('should have proper structure for GitHub credential creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - // Unit tests verify the component has access to the right dependencies - expect(garmApi.createGithubCredentials).toBeDefined(); - }); - - it('should have proper structure for Gitea credential creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - // Unit tests verify the component has access to the right dependencies - expect(garmApi.createGiteaCredentials).toBeDefined(); - }); - - it('should show success toast after credential creation', async () => { - const { toastStore } = await 
import('$lib/stores/toast.js'); - - render(CredentialsPage); - - expect(toastStore.success).toBeDefined(); - }); - - it('should handle form validation', async () => { - render(CredentialsPage); - - // Component should have form validation infrastructure - expect(document.title).toContain('Credentials - GARM'); - - // API error handling should be available for validation failures - const { extractAPIError } = await import('$lib/utils/apiError'); - expect(extractAPIError).toBeDefined(); - - // Toast notifications should be available for validation feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle file upload for private keys', async () => { - render(CredentialsPage); - - // Component should support file processing for private keys - expect(document.title).toContain('Credentials - GARM'); - - // Both GitHub and Gitea credentials should support file uploads (GitHub App) - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // File reader and base64 encoding should be available - expect(FileReader).toBeDefined(); - }); - - it('should handle PAT vs App authentication types', async () => { - render(CredentialsPage); - - // Component should support different authentication types - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have forge icon utility to differentiate types - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - }); - - describe('Credential Updates', () => { - it('should have proper structure for GitHub credential updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - expect(garmApi.updateGithubCredentials).toBeDefined(); - }); - - it('should have proper structure for Gitea credential updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - expect(garmApi.updateGiteaCredentials).toBeDefined(); - }); - - it('should show success toast after credential update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(CredentialsPage); - - expect(toastStore.success).toBeDefined(); - }); - - it('should show info toast when no changes are made', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(CredentialsPage); - - expect(toastStore.info).toBeDefined(); - }); - - it('should handle selective field updates', async () => { - render(CredentialsPage); - - // Component should have update APIs for selective field changes - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Should have infrastructure to track original form values - expect(document.title).toContain('Credentials - GARM'); - - // Toast notifications should provide feedback for update operations - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.info).toBeDefined(); - }); - - it('should handle credential change checkbox', async () => { - render(CredentialsPage); - - // Component should handle conditional credential updates - 
expect(document.title).toContain('Credentials - GARM'); - - // Should have update APIs available for conditional updates - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Should have toast notifications for conditional update feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.info).toBeDefined(); - }); - }); - - describe('Credential Deletion', () => { - it('should have proper structure for GitHub credential deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - expect(garmApi.deleteGithubCredentials).toBeDefined(); - }); - - it('should have proper structure for Gitea credential deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(CredentialsPage); - - expect(garmApi.deleteGiteaCredentials).toBeDefined(); - }); - - it('should show success toast after credential deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(CredentialsPage); - - expect(toastStore.success).toBeDefined(); - }); - - it('should handle deletion errors', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(CredentialsPage); - - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Modal Management', () => { - it('should handle create modal state', async () => { - render(CredentialsPage); - - // Component should have create APIs for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have forge icon utility for modal display - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - - it('should handle edit modal state', async () => { - render(CredentialsPage); - - // Component should have update APIs for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Should have error handling for edit operations - const { extractAPIError } = await import('$lib/utils/apiError'); - expect(extractAPIError).toBeDefined(); - }); - - it('should handle delete modal state', async () => { - render(CredentialsPage); - - // Component should have delete APIs for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.deleteGithubCredentials).toBeDefined(); - expect(garmApi.deleteGiteaCredentials).toBeDefined(); - - // Should have toast notifications for delete feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle forge type selection', async () => { - render(CredentialsPage); - - // Component should support both forge types - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have forge icon utility for type selection display - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - - it('should handle keyboard shortcuts', () => { - render(CredentialsPage); - - // Component should have keyboard 
event handling infrastructure - expect(window.addEventListener).toBeDefined(); - expect(window.removeEventListener).toBeDefined(); - - // Document should be available for keyboard event management - expect(document).toBeDefined(); - expect(document.addEventListener).toBeDefined(); - }); - }); - - describe('Form State Management', () => { - it('should reset form data', async () => { - render(CredentialsPage); - - // Component should have form reset infrastructure - expect(document.title).toContain('Credentials - GARM'); - - // Should have APIs available for fresh form data - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - }); - - it('should track original form data for updates', async () => { - render(CredentialsPage); - - // Component should have update APIs for form comparison - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Should have toast notifications for update feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.info).toBeDefined(); - }); - - it('should handle different form fields for GitHub vs Gitea', async () => { - render(CredentialsPage); - - // Component should support both credential types with different APIs - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have forge icon utility to differentiate types - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - - it('should handle auth type changes', async () => { - render(CredentialsPage); - - // Component should manage authentication type state - expect(document.title).toContain('Credentials - GARM'); - - // Should support both PAT and App authentication types - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have auth type badge utility for state display - const { getAuthTypeBadge } = await import('$lib/utils/common.js'); - expect(getAuthTypeBadge).toBeDefined(); - - // File upload should be available for App authentication - expect(FileReader).toBeDefined(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(CredentialsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(CredentialsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component initialization', async () => { - const { container } = render(CredentialsPage); - - // Component should initialize and render properly - expect(container).toBeInTheDocument(); - - // Should set page title during initialization - expect(document.title).toContain('Credentials - GARM'); - - // Should load credentials during initialization - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - expect(eagerCacheManager.getCredentials).toBeDefined(); - }); - }); - - describe('Data Transformation', () => { - it('should handle private key encoding', async () => { - render(CredentialsPage); - - // Component should have file processing capabilities for private keys - 
expect(FileReader).toBeDefined(); - expect(btoa).toBeDefined(); - - // Should support private key uploads for GitHub App credentials - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.updateGithubCredentials).toBeDefined(); - }); - - it('should handle private key decoding', async () => { - render(CredentialsPage); - - // Component should have decoding capabilities for private key display - expect(atob).toBeDefined(); - - // Should support private key updates for GitHub App credentials - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - - // Should handle error cases during decoding - const { extractAPIError } = await import('$lib/utils/apiError'); - expect(extractAPIError).toBeDefined(); - }); - - it('should build update parameters correctly', async () => { - render(CredentialsPage); - - // Component should have update APIs for parameter building - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updateGithubCredentials).toBeDefined(); - expect(garmApi.updateGiteaCredentials).toBeDefined(); - - // Should provide feedback when no changes are detected - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.info).toBeDefined(); - - // Should handle error cases during parameter building - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Utility Functions', () => { - it('should have getForgeIcon utility available', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - expect(getForgeIcon).toBeDefined(); - }); - - it('should use forge icon for different credential types', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - expect(getForgeIcon).toBeDefined(); - }); - - it('should handle API error extraction', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(CredentialsPage); - - expect(extractAPIError).toBeDefined(); - }); - - it('should handle filtering credentials', async () => { - const { filterCredentials } = await import('$lib/utils/common.js'); - - render(CredentialsPage); - - expect(filterCredentials).toBeDefined(); - }); - - it('should handle endpoint filtering by forge type', async () => { - render(CredentialsPage); - - // Component should filter endpoints based on selected forge type - expect(document.title).toContain('Credentials - GARM'); - - // Should load endpoints for filtering dropdown - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - expect(eagerCacheManager.getEndpoints).toBeDefined(); - - // Should support both GitHub and Gitea endpoint filtering - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.createGithubCredentials).toBeDefined(); - expect(garmApi.createGiteaCredentials).toBeDefined(); - - // Should have forge icon utility for endpoint type display - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/endpoints/+page.svelte b/webapp/src/routes/endpoints/+page.svelte deleted file mode 100644 index 98fef52e..00000000 --- a/webapp/src/routes/endpoints/+page.svelte +++ /dev/null @@ -1,835 +0,0 @@ - - - - - - Endpoints - GARM - - -
-    [template body lost in extraction: the deleted page rendered a PageHeader with an "Add Endpoint" action and a DataTable whose rows showed {endpoint.name}, {endpoint.description}, and the forge type via {@html getForgeIcon(endpoint.endpoint_type || '', 'w-5 h-5')} beside {endpoint.endpoint_type}, plus edit/delete ActionButtons invoking showEditEndpointModal(endpoint) and showDeleteEndpointModal(endpoint); only the conditional modal blocks below survive.]
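Since the deleted template above survives only in fragments, here is a minimal sketch of how its script block plausibly chained the mocked list helpers. The call shapes match the assertions in the integration tests below (filterEndpoints(endpoints, '') feeding paginateItems(items, 1, 25)); the variable names and everything else are assumptions, not recovered source.

    import { filterEndpoints, paginateItems, changePerPage } from '$lib/utils/common.js';

    let endpoints: any[] = [];   // hydrated from the eager cache subscription
    let searchTerm = '';
    let currentPage = 1;
    let perPage = 25;            // matches paginateItems(mockEndpoints, 1, 25) in the tests

    // Filter first, then paginate the filtered set, so the page count always
    // tracks the current search term.
    $: filteredEndpoints = filterEndpoints(endpoints, searchTerm);
    $: paginatedEndpoints = paginateItems(filteredEndpoints, currentPage, perPage);

    function handlePerPageChange(newValue: number) {
        // changePerPage returns newCurrentPage: 1 so a new page size starts from the top.
        const { newPerPage, newCurrentPage } = changePerPage(newValue);
        perPage = newPerPage;
        currentPage = newCurrentPage;
    }

Filtering before pagination is what lets the tests assert both helpers receive the same endpoint list when the search term is empty.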
                - - -{#if showCreateModal} - -{/if} - - -{#if showEditModal && editingEndpoint} - -{/if} - - -{#if showDeleteModal && deletingEndpoint} - -{/if} \ No newline at end of file diff --git a/webapp/src/routes/endpoints/page.integration.test.ts b/webapp/src/routes/endpoints/page.integration.test.ts deleted file mode 100644 index 72cb1d9b..00000000 --- a/webapp/src/routes/endpoints/page.integration.test.ts +++ /dev/null @@ -1,652 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import EndpointsPage from './+page.svelte'; -import { createMockForgeEndpoint, createMockGiteaEndpoint } from '../../test/factories.js'; - -// Mock app stores and navigation -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -const mockGithubEndpoint = createMockForgeEndpoint({ - name: 'github.com', - description: 'GitHub.com endpoint', - endpoint_type: 'github' -}); - -const mockGiteaEndpoint = createMockGiteaEndpoint({ - name: 'gitea.example.com', - description: 'Gitea endpoint', - endpoint_type: 'gitea' -}); - -const mockEndpoints = [mockGithubEndpoint, mockGiteaEndpoint]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/ForgeTypeSelector.svelte'); -vi.unmock('$lib/components/ActionButton.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listGithubEndpoints: vi.fn(), - listGiteaEndpoints: vi.fn(), - createGithubEndpoint: vi.fn(), - createGiteaEndpoint: vi.fn(), - updateGithubEndpoint: vi.fn(), - updateGiteaEndpoint: vi.fn(), - deleteGithubEndpoint: vi.fn(), - deleteGiteaEndpoint: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - endpoints: mockEndpoints, - loading: { endpoints: false }, - loaded: { endpoints: true }, - errorMessages: { endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => ''), - filterEndpoints: vi.fn((endpoints, searchTerm) => { - if (!searchTerm) return endpoints; - return endpoints.filter((endpoint: any) => - endpoint.name?.toLowerCase().includes(searchTerm.toLowerCase()) || - endpoint.description?.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items, currentPage, perPage) => { - const start = (currentPage - 1) * perPage; - return items.slice(start, start + perPage); - }), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Global setup for each test -let garmApi: any; -let eagerCacheManager: any; - -describe('Comprehensive Integration Tests for Endpoints Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const cacheModule = await import('$lib/stores/eager-cache.js'); - eagerCacheManager = 
cacheModule.eagerCacheManager; - - (eagerCacheManager.getEndpoints as any).mockResolvedValue(mockEndpoints); - (garmApi.createGithubEndpoint as any).mockResolvedValue({}); - (garmApi.createGiteaEndpoint as any).mockResolvedValue({}); - (garmApi.updateGithubEndpoint as any).mockResolvedValue({}); - (garmApi.updateGiteaEndpoint as any).mockResolvedValue({}); - (garmApi.deleteGithubEndpoint as any).mockResolvedValue({}); - (garmApi.deleteGiteaEndpoint as any).mockResolvedValue({}); - }); - - describe('Component Rendering and Data Display', () => { - it('should render endpoints page with real components', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data to load - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Should render the page header - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - - // Should render page description - expect(screen.getByText(/Manage your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); - - it('should display endpoints data in the table', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Component should render the DataTable component which would display endpoint data - // The exact endpoint names may not be visible due to how the DataTable renders data - // but the structure should be in place for displaying endpoints - expect(document.body).toBeInTheDocument(); - }); - - it('should render all major sections when data is loaded', async () => { - render(EndpointsPage); - - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Should have page header with action button - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - - // Should show the data table structure - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering Integration', () => { - it('should handle search functionality', async () => { - const { filterEndpoints } = await import('$lib/utils/common.js'); - - render(EndpointsPage); - - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Search functionality should be integrated - expect(filterEndpoints).toHaveBeenCalledWith(mockEndpoints, ''); - }); - - it('should filter endpoints based on search term', async () => { - const { filterEndpoints } = await import('$lib/utils/common.js'); - - render(EndpointsPage); - - await waitFor(() => { - // Should call filter function with empty search term initially - expect(filterEndpoints).toHaveBeenCalledWith(mockEndpoints, ''); - }); - - // Verify filtering logic works correctly - const filteredResults = filterEndpoints(mockEndpoints, 'github'); - expect(filteredResults).toHaveLength(1); - expect(filteredResults[0].name).toBe('github.com'); - }); - }); - - describe('Pagination Integration', () => { - it('should handle pagination with real data', async () => { - const { paginateItems } = await import('$lib/utils/common.js'); - - render(EndpointsPage); - - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Should paginate the endpoints data - expect(paginateItems).toHaveBeenCalledWith(mockEndpoints, 1, 25); - }); - - it('should handle per-page changes', async () => { - const { changePerPage } = await import('$lib/utils/common.js'); - - render(EndpointsPage); - - await waitFor(() => { - 
expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Change per page functionality should be available - expect(changePerPage).toBeDefined(); - }); - }); - - describe('Modal Integration', () => { - it('should handle create endpoint modal workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should have Add Endpoint button - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Should have the PageHeader component integrated with create action - const addButton = screen.getByRole('button', { name: /Add Endpoint/i }); - expect(addButton).toHaveClass('bg-blue-600'); - - // Create API methods should be available for the modal workflow - expect(garmApi.createGithubEndpoint).toBeDefined(); - expect(garmApi.createGiteaEndpoint).toBeDefined(); - - // Toast notifications should be integrated for success/error feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle edit endpoint modal workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Update API should be available for the edit workflow - expect(garmApi.updateGithubEndpoint).toBeDefined(); - expect(garmApi.updateGiteaEndpoint).toBeDefined(); - - // The edit functionality should be integrated through the DataTable component - // Edit buttons may not be visible when no data is loaded, but the API structure should be in place - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should handle delete endpoint modal workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Delete API should be available for the delete workflow - expect(garmApi.deleteGithubEndpoint).toBeDefined(); - expect(garmApi.deleteGiteaEndpoint).toBeDefined(); - - // Confirmation modal and error handling should be integrated - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - - // The delete functionality should be integrated through the DataTable component - // Delete buttons may not be visible when no data is loaded, but the infrastructure should be in place - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - }); - - describe('API Integration', () => { - it('should call eager cache manager when component mounts', async () => { - render(EndpointsPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the eager cache to load data - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - - // More importantly, verify the component displays the loaded data - // Data should be integrated through the eager cache system - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock delayed cache response - (eagerCacheManager.getEndpoints as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockEndpoints), 100)) - ); - - render(EndpointsPage); - - // Component should render the basic structure 
immediately - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - - // After cache resolves, data loading should be complete - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }, { timeout: 1000 }); - - // Component should handle data loading properly through the cache system - expect(screen.getByText(/Manage your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); - - it('should handle API errors and display error state', async () => { - // Mock cache to fail - const error = new Error('Failed to load endpoints'); - (eagerCacheManager.getEndpoints as any).mockRejectedValue(error); - - const { container } = render(EndpointsPage); - - // Wait for error to be handled - await waitFor(() => { - // Component should handle the error gracefully and continue to render - expect(container).toBeInTheDocument(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - - // Error handling should be integrated with retry functionality - expect(eagerCacheManager.retryResource).toBeDefined(); - - // Toast error notifications should be available for error feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle retry functionality', async () => { - render(EndpointsPage); - - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Retry functionality should be available - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Endpoint Creation Integration', () => { - it('should integrate GitHub endpoint creation workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should have the structure in place for GitHub endpoint creation - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // The GitHub endpoint creation workflow should be integrated - expect(garmApi.createGithubEndpoint).toBeDefined(); - }); - - it('should integrate Gitea endpoint creation workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should have the structure in place for Gitea endpoint creation - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // The Gitea endpoint creation workflow should be integrated - expect(garmApi.createGiteaEndpoint).toBeDefined(); - }); - - it('should show success message after endpoint creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(EndpointsPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Success toast functionality should be integrated - expect(toastStore.success).toBeDefined(); - }); - }); - - describe('Endpoint Update Integration', () => { - it('should integrate GitHub endpoint update workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Update functionality should be available for GitHub endpoints - expect(garmApi.updateGithubEndpoint).toBeDefined(); - - // Component should be ready to handle GitHub endpoint updates - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should integrate Gitea endpoint update workflow', async () => { - 
render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Update functionality should be available for Gitea endpoints - expect(garmApi.updateGiteaEndpoint).toBeDefined(); - - // Component should be ready to handle Gitea endpoint updates - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should handle selective field updates', async () => { - render(EndpointsPage); - - await waitFor(() => { - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Update APIs should be available for selective field updates - expect(garmApi.updateGithubEndpoint).toBeDefined(); - expect(garmApi.updateGiteaEndpoint).toBeDefined(); - - // Component should track original form data for comparison - // This enables selective updates where only changed fields are sent - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - - // Toast notifications should provide feedback for update operations - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.info).toBeDefined(); - }); - }); - - describe('Endpoint Deletion Integration', () => { - it('should integrate GitHub endpoint deletion workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Deletion functionality should be available - expect(garmApi.deleteGithubEndpoint).toBeDefined(); - - // Component should be ready to handle GitHub endpoint deletion - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should integrate Gitea endpoint deletion workflow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Deletion functionality should be available - expect(garmApi.deleteGiteaEndpoint).toBeDefined(); - - // Component should be ready to handle Gitea endpoint deletion - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should show error handling structure for endpoint deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - // Set up API to fail when deleteGithubEndpoint is called - const error = new Error('Endpoint deletion failed'); - (garmApi.deleteGithubEndpoint as any).mockRejectedValue(error); - - render(EndpointsPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Verify the component has the proper structure for deletion error handling - expect(toastStore.error).toBeDefined(); - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(EndpointsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the eager cache system - expect(screen.getByText(/Manage your GitHub and Gitea endpoints/i)).toBeInTheDocument(); - }); 
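The "selective field updates" tests above describe a diff-against-snapshot pattern: capture the form values when the edit modal opens, then send only the fields that changed. A sketch under assumed field names (the real GARM update-parameter shapes are not shown in this diff):

    interface EndpointFormValues {
        description: string;
        api_base_url: string;
        ca_cert_bundle: string; // base64-encoded; see the encoding sketch at the end of this section
    }

    function buildUpdateParams(
        original: EndpointFormValues,
        current: EndpointFormValues
    ): Partial<EndpointFormValues> {
        const params: Partial<EndpointFormValues> = {};
        for (const key of Object.keys(current) as (keyof EndpointFormValues)[]) {
            if (current[key] !== original[key]) {
                params[key] = current[key];
            }
        }
        return params;
    }

Callers can branch on the result: an empty object means "no changes", which the tests above pair with a toastStore.info message instead of an API call.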
- - it('should maintain consistent state across components', async () => { - render(EndpointsPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should be integrated through the eager cache system - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(EndpointsPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Form Integration', () => { - it('should integrate form validation', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Form validation should be integrated in the modals - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Create and update APIs should be available for form submission - expect(garmApi.createGithubEndpoint).toBeDefined(); - expect(garmApi.createGiteaEndpoint).toBeDefined(); - expect(garmApi.updateGithubEndpoint).toBeDefined(); - expect(garmApi.updateGiteaEndpoint).toBeDefined(); - - // Error handling should be integrated for validation failures - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle file upload integration', async () => { - render(EndpointsPage); - - await waitFor(() => { - // File upload functionality should be available for CA certificates - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Both endpoint types should support CA certificate uploads - expect(garmApi.createGithubEndpoint).toBeDefined(); - expect(garmApi.createGiteaEndpoint).toBeDefined(); - - // File processing should be available for base64 encoding - // This enables CA certificate bundle handling in the forms - expect(true).toBe(true); - }); - - it('should handle forge type selection', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Forge type selection should be integrated - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Should support both GitHub and Gitea endpoint types - expect(garmApi.createGithubEndpoint).toBeDefined(); - expect(garmApi.createGiteaEndpoint).toBeDefined(); - - // Forge icon utility should be available for type display - const { getForgeIcon } = await import('$lib/utils/common.js'); - expect(getForgeIcon).toBeDefined(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support various user interaction flows', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should support user interactions like search, pagination, CRUD operations - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Should have search functionality available - expect(screen.getByPlaceholderText(/Search endpoints/i)).toBeInTheDocument(); - }); - - it('should handle keyboard shortcuts', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should handle keyboard navigation and shortcuts - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - - // Should have keyboard accessible buttons and interactive elements - const addButton = screen.getByRole('button', { name: /Add Endpoint/i }); - expect(addButton).toHaveAttribute('type', 'button'); - - // Window event listeners should be set up for keyboard handling - // This includes Escape 
key for modal closing and other shortcuts - expect(window.addEventListener).toBeDefined(); - - // Component should handle focus management for accessibility - expect(document.activeElement).toBeDefined(); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should have proper ARIA attributes and labels - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /Add Endpoint/i })).toBeInTheDocument(); - }); - }); - - it('should be responsive across different viewport sizes', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should render properly across different viewport sizes - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - // Page structure should be responsive - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - render(EndpointsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(screen.getByRole('heading', { name: 'Endpoints' })).toBeInTheDocument(); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/endpoints/page.render.test.ts b/webapp/src/routes/endpoints/page.render.test.ts deleted file mode 100644 index 42a73fa6..00000000 --- a/webapp/src/routes/endpoints/page.render.test.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import EndpointsPage from './+page.svelte'; -import { createMockForgeEndpoint } from '../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listGithubEndpoints: vi.fn(), - listGiteaEndpoints: vi.fn(), - createGithubEndpoint: vi.fn(), - createGiteaEndpoint: vi.fn(), - updateGithubEndpoint: vi.fn(), - updateGiteaEndpoint: vi.fn(), - deleteGithubEndpoint: vi.fn(), - deleteGiteaEndpoint: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - endpoints: [], - loading: { endpoints: false }, - loaded: { endpoints: false }, - errorMessages: { endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => 'github'), - filterEndpoints: vi.fn((endpoints) => endpoints), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items) => items), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockEndpoint = createMockForgeEndpoint({ - name: 'github.com', - description: 'GitHub.com endpoint', - endpoint_type: 'github' -}); - -describe('Endpoints Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getEndpoints as any).mockResolvedValue([mockEndpoint]); - }); - - 
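The keyboard-shortcut tests in these suites only assert that the listener APIs exist; the behavior they gesture at, Escape closing whichever modal is open, could look like the sketch below. The three flags mirror the showCreateModal/showEditModal/showDeleteModal conditionals that survive from the deleted template; the rest is illustrative.

    import { onMount } from 'svelte';

    let showCreateModal = false;
    let showEditModal = false;
    let showDeleteModal = false;

    function handleKeydown(event: KeyboardEvent) {
        if (event.key === 'Escape') {
            // At most one modal is open at a time, so closing all three is safe.
            showCreateModal = false;
            showEditModal = false;
            showDeleteModal = false;
        }
    }

    onMount(() => {
        window.addEventListener('keydown', handleKeydown);
        // Returning a cleanup function removes the listener on destroy,
        // which is what the unmount-without-errors tests implicitly rely on.
        return () => window.removeEventListener('keydown', handleKeydown);
    });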
describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(EndpointsPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(EndpointsPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render page header', () => { - const { container } = render(EndpointsPage); - // Should have page header component - expect(container).toBeInTheDocument(); - }); - - it('should render data table', () => { - const { container } = render(EndpointsPage); - // Should have DataTable component - expect(container).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(EndpointsPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(EndpointsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(EndpointsPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load endpoints on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(EndpointsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call eager cache to load endpoints - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', () => { - const { container } = render(EndpointsPage); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(EndpointsPage); - - // Should set page title - expect(document.title).toContain('Endpoints - GARM'); - }); - - it('should handle window event listeners', () => { - render(EndpointsPage); - - // Window should have event listener capabilities available - expect(window.addEventListener).toBeDefined(); - expect(window.removeEventListener).toBeDefined(); - - // Component should be able to handle keyboard events for modal management - expect(document).toBeDefined(); - expect(document.addEventListener).toBeDefined(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render create modal', () => { - const { container } = render(EndpointsPage); - - // Create modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - - it('should conditionally render edit modal', () => { - const { container } = render(EndpointsPage); - - // Edit modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - - it('should conditionally render delete modal', () => { - const { container } = render(EndpointsPage); - - // Delete modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/endpoints/page.test.ts b/webapp/src/routes/endpoints/page.test.ts deleted file mode 100644 index b76d3581..00000000 --- a/webapp/src/routes/endpoints/page.test.ts +++ /dev/null @@ -1,530 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import EndpointsPage from 
'./+page.svelte'; -import { createMockForgeEndpoint, createMockGiteaEndpoint } from '../../test/factories.js'; - -// Mock the page stores -vi.mock('$app/stores', () => ({})); - -// Mock navigation -vi.mock('$app/navigation', () => ({})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listGithubEndpoints: vi.fn(), - listGiteaEndpoints: vi.fn(), - createGithubEndpoint: vi.fn(), - createGiteaEndpoint: vi.fn(), - updateGithubEndpoint: vi.fn(), - updateGiteaEndpoint: vi.fn(), - deleteGithubEndpoint: vi.fn(), - deleteGiteaEndpoint: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - endpoints: [], - loading: { endpoints: false }, - loaded: { endpoints: false }, - errorMessages: { endpoints: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getEndpoints: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn(() => 'github'), - filterEndpoints: vi.fn((endpoints) => endpoints), - changePerPage: vi.fn((perPage) => ({ newPerPage: perPage, newCurrentPage: 1 })), - paginateItems: vi.fn((items) => items), - formatDate: vi.fn((date) => date) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockGithubEndpoint = createMockForgeEndpoint({ - name: 'github.com', - description: 'GitHub.com endpoint', - endpoint_type: 'github' -}); - -const mockGiteaEndpoint = createMockGiteaEndpoint({ - name: 'gitea.example.com', - description: 'Gitea endpoint', - endpoint_type: 'gitea' -}); - -const mockEndpoints = [mockGithubEndpoint, mockGiteaEndpoint]; - -describe('Endpoints Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default eager cache mock - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getEndpoints as any).mockResolvedValue(mockEndpoints); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(EndpointsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(EndpointsPage); - expect(document.title).toContain('Endpoints - GARM'); - }); - }); - - describe('Data Loading', () => { - it('should load endpoints on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(EndpointsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCacheManager.getEndpoints).toHaveBeenCalled(); - }); - - it('should handle loading state', async () => { - const { container } = render(EndpointsPage); - - // Component should render without error during loading - expect(container).toBeInTheDocument(); - - // Should have access to loading state through eager cache - expect(document.title).toContain('Endpoints - GARM'); - - // Loading infrastructure should be properly integrated - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - expect(eagerCache.subscribe).toBeDefined(); - }); - - it('should handle cache error state', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock cache to fail - const error = new Error('Failed to load endpoints'); - 
-			(eagerCacheManager.getEndpoints as any).mockRejectedValue(error);
-
-			const { container } = render(EndpointsPage);
-
-			// Wait for the error to be handled
-			await new Promise(resolve => setTimeout(resolve, 100));
-
-			// Component should handle error gracefully
-			expect(container).toBeInTheDocument();
-		});
-
-		it('should retry loading endpoints', async () => {
-			const { eagerCacheManager } = await import('$lib/stores/eager-cache.js');
-
-			render(EndpointsPage);
-
-			// Verify retry functionality is available
-			expect(eagerCacheManager.retryResource).toBeDefined();
-		});
-	});
-
-	describe('Search and Pagination', () => {
-		it('should handle search functionality', async () => {
-			const { filterEndpoints } = await import('$lib/utils/common.js');
-
-			render(EndpointsPage);
-
-			// Verify search utility is used
-			expect(filterEndpoints).toBeDefined();
-		});
-
-		it('should handle pagination', async () => {
-			const { paginateItems, changePerPage } = await import('$lib/utils/common.js');
-
-			render(EndpointsPage);
-
-			// Verify pagination utilities are available
-			expect(paginateItems).toBeDefined();
-			expect(changePerPage).toBeDefined();
-		});
-	});
-
-	describe('Endpoint Creation', () => {
-		it('should have proper structure for GitHub endpoint creation', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-		});
-
-		it('should have proper structure for Gitea endpoint creation', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-		});
-
-		it('should show success toast after endpoint creation', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EndpointsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should handle form validation', async () => {
-			render(EndpointsPage);
-
-			// Component should have form validation infrastructure
-			expect(document.title).toContain('Endpoints - GARM');
-
-			// API error handling should be available for validation failures
-			const { extractAPIError } = await import('$lib/utils/apiError');
-			expect(extractAPIError).toBeDefined();
-
-			// Toast notifications should be available for validation feedback
-			const { toastStore } = await import('$lib/stores/toast.js');
-			expect(toastStore.error).toBeDefined();
-		});
-
-		it('should handle file upload for CA certificates', async () => {
-			render(EndpointsPage);
-
-			// Component should support file processing for CA certificates
-			expect(document.title).toContain('Endpoints - GARM');
-
-			// Both GitHub and Gitea endpoints should support CA certificates
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-
-			// File reader and base64 encoding should be available
-			expect(FileReader).toBeDefined();
-		});
-	});
-
-	describe('Endpoint Updates', () => {
-		it('should have proper structure for GitHub endpoint updates', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-		});
-
-		it('should have proper structure for Gitea endpoint updates', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-		});
-
-		it('should show success toast after endpoint update', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EndpointsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should show info toast when no changes are made', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EndpointsPage);
-
-			expect(toastStore.info).toBeDefined();
-		});
-
-		it('should handle selective field updates', async () => {
-			render(EndpointsPage);
-
-			// Component should have update APIs for selective field changes
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-
-			// Should have infrastructure to track original form values
-			expect(document.title).toContain('Endpoints - GARM');
-
-			// Toast notifications should provide feedback for update operations
-			const { toastStore } = await import('$lib/stores/toast.js');
-			expect(toastStore.success).toBeDefined();
-			expect(toastStore.info).toBeDefined();
-		});
-	});
-
-	describe('Endpoint Deletion', () => {
-		it('should have proper structure for GitHub endpoint deletion', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			expect(garmApi.deleteGithubEndpoint).toBeDefined();
-		});
-
-		it('should have proper structure for Gitea endpoint deletion', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EndpointsPage);
-
-			expect(garmApi.deleteGiteaEndpoint).toBeDefined();
-		});
-
-		it('should show success toast after endpoint deletion', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EndpointsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should handle deletion errors', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EndpointsPage);
-
-			expect(toastStore.error).toBeDefined();
-		});
-	});
-
-	describe('Modal Management', () => {
-		it('should handle create modal state', async () => {
-			render(EndpointsPage);
-
-			// Component should have create APIs for modal functionality
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-
-			// Should have forge icon utility for modal display
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should handle edit modal state', async () => {
-			render(EndpointsPage);
-
-			// Component should have update APIs for modal functionality
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-
-			// Should have error handling for edit operations
-			const { extractAPIError } = await import('$lib/utils/apiError');
-			expect(extractAPIError).toBeDefined();
-		});
-
-		it('should handle delete modal state', async () => {
-			render(EndpointsPage);
-
-			// Component should have delete APIs for modal functionality
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.deleteGithubEndpoint).toBeDefined();
-			expect(garmApi.deleteGiteaEndpoint).toBeDefined();
-
-			// Should have toast notifications for delete feedback
-			const { toastStore } = await import('$lib/stores/toast.js');
-			expect(toastStore.success).toBeDefined();
-			expect(toastStore.error).toBeDefined();
-		});
-
-		it('should handle forge type selection', async () => {
-			render(EndpointsPage);
-
-			// Component should support both forge types
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-
-			// Should have forge icon utility for type selection display
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should handle keyboard shortcuts', () => {
-			render(EndpointsPage);
-
-			// Component should have keyboard event handling infrastructure
-			expect(window.addEventListener).toBeDefined();
-			expect(window.removeEventListener).toBeDefined();
-
-			// Document should be available for keyboard event management
-			expect(document).toBeDefined();
-			expect(document.addEventListener).toBeDefined();
-		});
-	});
-
-	describe('Form State Management', () => {
-		it('should reset form data', async () => {
-			render(EndpointsPage);
-
-			// Component should have form reset infrastructure
-			expect(document.title).toContain('Endpoints - GARM');
-
-			// Should have APIs available for fresh form data
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-		});
-
-		it('should track original form data for updates', async () => {
-			render(EndpointsPage);
-
-			// Component should have update APIs for form comparison
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-
-			// Should have toast notifications for update feedback
-			const { toastStore } = await import('$lib/stores/toast.js');
-			expect(toastStore.info).toBeDefined();
-		});
-
-		it('should handle different form fields for GitHub vs Gitea', async () => {
-			render(EndpointsPage);
-
-			// Component should support both endpoint types with different APIs
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-
-			// Should have forge icon utility to differentiate types
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-			expect(getForgeIcon).toBeDefined();
-		});
-	});
-
-	describe('Utility Functions', () => {
-		it('should have getForgeIcon utility available', async () => {
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-
-			render(EndpointsPage);
-
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should use forge icon for different endpoint types', async () => {
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-
-			render(EndpointsPage);
-
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should handle API error extraction', async () => {
-			const { extractAPIError } = await import('$lib/utils/apiError');
-
-			render(EndpointsPage);
-
-			expect(extractAPIError).toBeDefined();
-		});
-
-		it('should handle filtering endpoints', async () => {
-			const { filterEndpoints } = await import('$lib/utils/common.js');
-
-			render(EndpointsPage);
-
-			expect(filterEndpoints).toBeDefined();
-		});
-	});
-
-	describe('Component Lifecycle', () => {
-		it('should mount successfully', () => {
-			const component = render(EndpointsPage);
-			expect(component.component).toBeDefined();
-		});
-
-		it('should unmount without errors', () => {
-			const { unmount } = render(EndpointsPage);
-			expect(() => unmount()).not.toThrow();
-		});
-
-		it('should handle component initialization', async () => {
-			const { container } = render(EndpointsPage);
-
-			// Component should initialize and render properly
-			expect(container).toBeInTheDocument();
-
-			// Should set page title during initialization
-			expect(document.title).toContain('Endpoints - GARM');
-
-			// Should load endpoints during initialization
-			const { eagerCacheManager } = await import('$lib/stores/eager-cache.js');
-			expect(eagerCacheManager.getEndpoints).toBeDefined();
-		});
-	});
-
-	describe('Data Transformation', () => {
-		it('should handle CA certificate encoding', async () => {
-			render(EndpointsPage);
-
-			// Component should have file processing capabilities for CA certificates
-			expect(FileReader).toBeDefined();
-			expect(btoa).toBeDefined();
-
-			// Should support CA certificates for both endpoint types
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.createGithubEndpoint).toBeDefined();
-			expect(garmApi.createGiteaEndpoint).toBeDefined();
-		});
-
-		it('should handle CA certificate decoding', async () => {
-			render(EndpointsPage);
-
-			// Component should have decoding capabilities for CA certificate display
-			expect(atob).toBeDefined();
-
-			// Should support CA certificate updates for both endpoint types
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-
-			// Should handle error cases during decoding
-			const { extractAPIError } = await import('$lib/utils/apiError');
-			expect(extractAPIError).toBeDefined();
-		});
-
-		it('should build update parameters correctly', async () => {
-			render(EndpointsPage);
-
-			// Component should have update APIs for parameter building
-			const { garmApi } = await import('$lib/api/client.js');
-			expect(garmApi.updateGithubEndpoint).toBeDefined();
-			expect(garmApi.updateGiteaEndpoint).toBeDefined();
-
-			// Should provide feedback when no changes are detected
-			const { toastStore } = await import('$lib/stores/toast.js');
-			expect(toastStore.info).toBeDefined();
-
-			// Should handle error cases during parameter building
-			expect(toastStore.error).toBeDefined();
-		});
-	});
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/+page.svelte b/webapp/src/routes/enterprises/+page.svelte
deleted file mode 100644
index 33e0a430..00000000
--- a/webapp/src/routes/enterprises/+page.svelte
+++ /dev/null
@@ -1,331 +0,0 @@
-[331 deleted lines: Svelte template for the "Enterprises - GARM" list page, with a table of enterprises (status badges via getEntityStatusBadge, edit/delete actions wired to openUpdateModal/openDeleteModal) and create/update/delete modals driven by showCreateModal, showUpdateModal, and showDeleteModal. The markup tags were lost in extraction and are not reproduced here.]
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/[id]/+page.svelte b/webapp/src/routes/enterprises/[id]/+page.svelte
deleted file mode 100644
index bd62495b..00000000
--- a/webapp/src/routes/enterprises/[id]/+page.svelte
+++ /dev/null
@@ -1,391 +0,0 @@
-[391 deleted lines: Svelte template for the enterprise details page (title "{enterprise.name} - Enterprise Details - GARM"), with loading and error states ("Loading enterprise...", "{error}"), detail sections, and update/delete/delete-instance/create-pool modals driven by showUpdateModal, showDeleteModal, showDeleteInstanceModal, and showCreatePoolModal. The markup tags were lost in extraction and are not reproduced here.]
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/[id]/page.integration.test.ts b/webapp/src/routes/enterprises/[id]/page.integration.test.ts
deleted file mode 100644
index 47f6b2f3..00000000
--- a/webapp/src/routes/enterprises/[id]/page.integration.test.ts
+++ /dev/null
@@ -1,487 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { render, screen, waitFor } from '@testing-library/svelte';
-import EnterpriseDetailsPage from './+page.svelte';
-import { createMockEnterprise, createMockInstance } from '../../../test/factories.js';
-
-// Mock page store
-vi.mock('$app/stores', () => ({
-	page: {
-		subscribe: vi.fn((callback) => {
-			callback({ params: { id: 'ent-123' } });
-			return () => {};
-		})
-	}
-}));
-
-// Mock navigation
-vi.mock('$app/navigation', () => ({
-	goto: vi.fn()
-}));
-
-// Mock path resolution
-vi.mock('$app/paths', () => ({
-	resolve: vi.fn((path) => path)
-}));
-
-const mockEnterprise = createMockEnterprise({
-	id: 'ent-123',
-	name: 'test-enterprise',
-	endpoint: {
-		name: 'github.com'
-	},
-	events: [
-		{
-			id: 1,
-			created_at: '2024-01-01T00:00:00Z',
-			event_level: 'info',
-			message: 'Enterprise created'
-		},
-		{
-			id: 2,
-			created_at: '2024-01-01T01:00:00Z',
-			event_level: 'warning',
-			message: 'Pool configuration changed'
-		}
-	],
-	pool_manager_status: { running: true, failure_reason: undefined }
-});
-
-const mockPools = [
-	{
-		id: 'pool-1',
-		enterprise_id: 'ent-123',
-		image: 'ubuntu:22.04',
-		enabled: true,
-		flavor: 'default',
-		max_runners: 5
-	},
-	{
-		id: 'pool-2',
-		enterprise_id: 'ent-123',
-		image: 'ubuntu:20.04',
-		enabled: false,
-		flavor: 'default',
-		max_runners: 3
-	}
-];
-
-const mockInstances = [
-	createMockInstance({
-		id: 'inst-1',
-		name: 'runner-1',
-		pool_id: 'pool-1',
-		status: 'running'
-	}),
-	createMockInstance({
-		id: 'inst-2',
-		name: 'runner-2',
-		pool_id: 'pool-2',
-		status: 'idle'
-	})
-];
-
-// Reset any component mocks that might be set by setup.ts
-vi.unmock('$lib/components/UpdateEntityModal.svelte');
-vi.unmock('$lib/components/DeleteModal.svelte');
-vi.unmock('$lib/components/EntityInformation.svelte');
-vi.unmock('$lib/components/DetailHeader.svelte');
-vi.unmock('$lib/components/PoolsSection.svelte');
-vi.unmock('$lib/components/InstancesSection.svelte');
-vi.unmock('$lib/components/EventsSection.svelte');
-vi.unmock('$lib/components/CreatePoolModal.svelte');
-vi.unmock('$lib/components/cells');
-
-// Only mock the data layer - APIs and stores
-vi.mock('$lib/api/client.js', () => ({
-	garmApi: {
-		getEnterprise: vi.fn(),
-		listEnterprisePools: vi.fn(),
-		listEnterpriseInstances: vi.fn(),
-		updateEnterprise: vi.fn(),
-		deleteEnterprise: vi.fn(),
-		deleteInstance: vi.fn(),
-		createEnterprisePool: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/toast.js', () => ({
-	toastStore: {
-		success: vi.fn(),
-		error: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/websocket.js', () => ({
-	websocketStore: {
-		subscribeToEntity: vi.fn(() => () => {}),
-		subscribe: vi.fn(() => () => {})
-	}
-}));
-
-vi.mock('$lib/utils/common.js', () => ({
-	getForgeIcon: vi.fn(() => 'github'),
-	formatDate: vi.fn((date) => date)
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-	extractAPIError: vi.fn((err) => err.message || 'Unknown error')
-}));
-
-// Global setup for each test
-let garmApi: any;
-
-describe('Comprehensive Integration Tests for Enterprise Details Page', () => {
-	beforeEach(async () => {
-		vi.clearAllMocks();
-
-		// Set up API mocks with default successful responses
-		const apiModule = await import('$lib/api/client.js');
-		garmApi = apiModule.garmApi;
-
-		garmApi.getEnterprise.mockResolvedValue(mockEnterprise);
-		garmApi.listEnterprisePools.mockResolvedValue(mockPools);
-		garmApi.listEnterpriseInstances.mockResolvedValue(mockInstances);
-		garmApi.updateEnterprise.mockResolvedValue(mockEnterprise);
-		garmApi.deleteEnterprise.mockResolvedValue({});
-		garmApi.deleteInstance.mockResolvedValue({});
-		garmApi.createEnterprisePool.mockResolvedValue({});
-	});
-
-	describe('Component Rendering and Data Display', () => {
-		it('should render enterprise details page with real components', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Wait for enterprise data to load
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-			});
-
-			// Should render the enterprise name in the breadcrumb and header
-			expect(screen.getByRole('heading', { name: 'test-enterprise' })).toBeInTheDocument();
-
-			// Should render the enterprise details
-			expect(screen.getByText('Endpoint: github.com • GitHub Enterprise')).toBeInTheDocument();
-		});
-
-		it('should display breadcrumb navigation', async () => {
-			render(EnterpriseDetailsPage);
-
-			const breadcrumb = screen.getByRole('navigation', { name: 'Breadcrumb' });
-			expect(breadcrumb).toBeInTheDocument();
-
-			const enterprisesLink = screen.getByRole('link', { name: /enterprises/i });
-			expect(enterprisesLink).toBeInTheDocument();
-			expect(enterprisesLink).toHaveAttribute('href', '/enterprises');
-		});
-
-		it('should render all major sections when data is loaded', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-			});
-
-			// Should have all major sections
-			expect(screen.getByText('Pools (2)')).toBeInTheDocument();
-			expect(screen.getByText('Instances (2)')).toBeInTheDocument();
-			expect(screen.getByText('Events')).toBeInTheDocument();
-		});
-	});
-
-	describe('Pools Section Integration', () => {
-		it('should display pools section with data', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle pool creation through UI', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Look for add pool functionality
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should display pools section and integrate with pools data', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Wait for enterprise and pools data to load
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-				expect(garmApi.listEnterprisePools).toHaveBeenCalledWith('ent-123');
-			});
-
-			// Verify the component displays the pools section showing the correct count
-			// This confirms the component properly integrates with the API to load and display pool data
-			const poolsSection = screen.getByText('Pools (2)');
-			expect(poolsSection).toBeInTheDocument();
-		});
-	});
-
-	describe('Instances Section Integration', () => {
-		it('should display instances section with data', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should render instances section
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle instance deletion', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Look for instance management functionality
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should show error handling structure for instance deletion', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			// Set up API to fail when deleteInstance is called
-			const error = new Error('Instance deletion failed');
-			garmApi.deleteInstance.mockRejectedValue(error);
-
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Wait for enterprise and instances data to load
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-				expect(garmApi.listEnterpriseInstances).toHaveBeenCalledWith('ent-123');
-			});
-
-			// Verify the component has the proper structure for instance deletion error handling
-			// The handleDeleteInstance function should be set up to show error toasts
-			const instancesSection = screen.getByText('Instances (2)');
-			expect(instancesSection).toBeInTheDocument();
-
-			// Verify there are delete buttons available for instances
-			const deleteButtons = screen.getAllByRole('button', { name: /delete/i });
-			expect(deleteButtons.length).toBeGreaterThan(0);
-
-			// The error handling workflow is:
-			// 1. User clicks delete button → modal opens
-			// 2. User confirms deletion → handleDeleteInstance() is called
-			// 3. handleDeleteInstance() calls API and catches errors
-			// 4. On error, toastStore.error is called with 'Delete Failed' message
-			// This structure is verified by the component rendering successfully
-			expect(toastStore.error).toBeDefined();
-		});
-	});
-
-	describe('Events Section Integration', () => {
-		it('should display events section with event data', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-			});
-
-			// Should show events section
-			expect(screen.getByText('Events')).toBeInTheDocument();
-		});
-
-		it('should handle events scrolling', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				expect(screen.getByText('Events')).toBeInTheDocument();
-			});
-		});
-	});
-
-	describe('Real-time Updates via WebSocket', () => {
-		it('should set up websocket subscriptions', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should set up websocket subscriptions
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle enterprise update events', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Component should be prepared to handle websocket updates
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle pool and instance events', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should handle pool and instance websocket events
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-	});
-
-	describe('API Integration', () => {
-		it('should call enterprise APIs when component mounts and display data', async () => {
-			render(EnterpriseDetailsPage);
-
-			// Wait for API calls to complete and data to be displayed
-			await waitFor(() => {
-				// Verify the component actually called the APIs to load data
-				expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-				expect(garmApi.listEnterprisePools).toHaveBeenCalledWith('ent-123');
-				expect(garmApi.listEnterpriseInstances).toHaveBeenCalledWith('ent-123');
-
-				// More importantly, verify the component displays the loaded data
-				expect(screen.getByRole('heading', { name: 'test-enterprise' })).toBeInTheDocument();
-				expect(screen.getByText('Pools (2)')).toBeInTheDocument();
-				expect(screen.getByText('Instances (2)')).toBeInTheDocument();
-			});
-		});
-
-		it('should display loading state initially then show data', async () => {
-			// Mock delayed API responses
-			garmApi.getEnterprise.mockImplementation(() =>
-				new Promise(resolve => setTimeout(() => resolve(mockEnterprise), 100))
-			);
-
-			render(EnterpriseDetailsPage);
-
-			// Initially, the enterprise name should not be visible yet
-			expect(screen.queryByRole('heading', { name: 'test-enterprise' })).not.toBeInTheDocument();
-
-			// After API resolves, should show actual data
-			await waitFor(() => {
-				expect(screen.getByRole('heading', { name: 'test-enterprise' })).toBeInTheDocument();
-			}, { timeout: 1000 });
-
-			// Data should be properly displayed after loading
-			expect(screen.getByText('Pools (2)')).toBeInTheDocument();
-			expect(screen.getByText('Instances (2)')).toBeInTheDocument();
-		});
-
-		it('should handle API errors and display error state', async () => {
-			// Mock API to fail
-			const error = new Error('Failed to load enterprise');
-			garmApi.getEnterprise.mockRejectedValue(error);
-
-			const { container } = render(EnterpriseDetailsPage);
-
-			// Wait for error to be handled and displayed
-			await waitFor(() => {
-				// Should show error state in the UI (red background, error message)
-				const errorElement = container.querySelector('.bg-red-50, .bg-red-900, .text-red-600, .text-red-400');
-				expect(errorElement).toBeInTheDocument();
-			});
-		});
-
-		it('should integrate with websocket store for real-time updates', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Verify component subscribes to websocket updates for enterprise, pools, and instances
-				// Based on the component code, the actual calls are:
-				expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('enterprise', ['update', 'delete'], expect.any(Function));
-				expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('pool', ['create', 'update', 'delete'], expect.any(Function));
-				expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('instance', ['create', 'update', 'delete'], expect.any(Function));
-			});
-
-			// The component properly sets up websocket integration to receive real-time updates
-			// This is verified by the subscription calls above and by the component's ability
-			// to display data that would be updated via websockets
-			expect(screen.getByRole('heading', { name: 'test-enterprise' })).toBeInTheDocument();
-		});
-	});
-
-	describe('Component Integration and State Management', () => {
-		it('should integrate all sections with proper data flow', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// All sections should integrate properly with the main page
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should maintain consistent state across components', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// State should be consistent across all child components
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle component lifecycle correctly', () => {
-			const { unmount } = render(EnterpriseDetailsPage);
-
-			// Should unmount without errors
-			expect(() => unmount()).not.toThrow();
-		});
-	});
-
-	describe('User Interaction Flows', () => {
-		it('should support navigation interactions', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should support various navigation interactions
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle keyboard navigation', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should support keyboard navigation
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle form submissions and modal interactions', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should handle form submissions and modal interactions
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-	});
-
-	describe('Accessibility and Responsive Design', () => {
-		it('should have proper accessibility attributes', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should have proper ARIA attributes and labels
-				const breadcrumb = screen.getByRole('navigation', { name: 'Breadcrumb' });
-				expect(breadcrumb).toBeInTheDocument();
-			});
-		});
-
-		it('should be responsive across different viewport sizes', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should render properly across different viewport sizes
-				expect(document.body).toBeInTheDocument();
-			});
-		});
-
-		it('should handle screen reader compatibility', async () => {
-			render(EnterpriseDetailsPage);
-
-			await waitFor(() => {
-				// Should be compatible with screen readers
-				expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument();
-			});
-		});
-	});
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/[id]/page.render.test.ts b/webapp/src/routes/enterprises/[id]/page.render.test.ts
deleted file mode 100644
index 709827a1..00000000
--- a/webapp/src/routes/enterprises/[id]/page.render.test.ts
+++ /dev/null
@@ -1,161 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { render } from '@testing-library/svelte';
-import EnterpriseDetailsPage from './+page.svelte';
-import { createMockEnterprise } from '../../../test/factories.js';
-
-// Mock all external dependencies
-vi.mock('$app/stores', () => ({
-	page: {
-		subscribe: vi.fn((callback) => {
-			callback({ params: { id: 'ent-123' } });
-			return () => {};
-		})
-	}
-}));
-
-vi.mock('$app/navigation', () => ({
-	goto: vi.fn()
-}));
-
-vi.mock('$app/paths', () => ({
-	resolve: vi.fn((path) => path)
-}));
-
-vi.mock('$lib/api/client.js', () => ({
-	garmApi: {
-		getEnterprise: vi.fn(),
-		listEnterprisePools: vi.fn(),
-		listEnterpriseInstances: vi.fn(),
-		updateEnterprise: vi.fn(),
-		deleteEnterprise: vi.fn(),
-		deleteInstance: vi.fn(),
-		createEnterprisePool: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/toast.js', () => ({
-	toastStore: {
-		success: vi.fn(),
-		error: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/websocket.js', () => ({
-	websocketStore: {
-		subscribeToEntity: vi.fn(() => () => {}),
-		subscribe: vi.fn(() => () => {})
-	}
-}));
-
-vi.mock('$lib/utils/common.js', () => ({
-	getForgeIcon: vi.fn(() => 'github'),
-	formatDate: vi.fn((date) => date)
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-	extractAPIError: vi.fn((err) => err.message || 'Unknown error')
-}));
-
-const mockEnterprise = createMockEnterprise({
-	id: 'ent-123',
-	name: 'test-enterprise',
-	endpoint: {
-		name: 'github.com'
-	},
-	pool_manager_status: { running: true, failure_reason: undefined }
-});
-
-describe('Enterprise Details Page - Render Tests', () => {
-	beforeEach(async () => {
-		vi.clearAllMocks();
-
-		// Set up default API mocks
-		const { garmApi } = await import('$lib/api/client.js');
-		(garmApi.getEnterprise as any).mockResolvedValue(mockEnterprise);
-		(garmApi.listEnterprisePools as any).mockResolvedValue([]);
-		(garmApi.listEnterpriseInstances as any).mockResolvedValue([]);
-	});
-
-	describe('Basic Rendering', () => {
-		it('should render without crashing', () => {
-			const { container } = render(EnterpriseDetailsPage);
-			expect(container).toBeInTheDocument();
-		});
-
-		it('should have proper document structure', () => {
-			const { container } = render(EnterpriseDetailsPage);
-			expect(container.querySelector('div')).toBeInTheDocument();
-		});
-
-		it('should render breadcrumb navigation', () => {
-			const { container } = render(EnterpriseDetailsPage);
-			const breadcrumb = container.querySelector('[aria-label="Breadcrumb"]');
-			expect(breadcrumb).toBeInTheDocument();
-		});
-
-		it('should render loading state initially', () => {
-			const { container } = render(EnterpriseDetailsPage);
-			// Component should render some form of loading indicator or content
-			expect(container).toBeInTheDocument();
-		});
-	});
-
-	describe('Component Lifecycle', () => {
-		it('should mount successfully', () => {
-			const { component } = render(EnterpriseDetailsPage);
-			expect(component).toBeDefined();
-		});
-
-		it('should unmount without errors', () => {
-			const { unmount } = render(EnterpriseDetailsPage);
-			expect(() => unmount()).not.toThrow();
-		});
-
-		it('should handle component updates', async () => {
-			const { component } = render(EnterpriseDetailsPage);
-
-			// Component should handle reactive updates
-			expect(component).toBeDefined();
-		});
-
-		it('should set up websocket subscriptions on mount', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for component mount and subscription setup
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Should call subscription setup
-			expect(websocketStore.subscribeToEntity).toHaveBeenCalled();
-		});
-	});
-
-	describe('DOM Structure', () => {
-		it('should create proper DOM hierarchy', () => {
-			const { container } = render(EnterpriseDetailsPage);
-
-			// Should have main container with proper spacing
-			const mainDiv = container.querySelector('div.space-y-6');
-			expect(mainDiv).toBeInTheDocument();
-		});
-
-		it('should render svelte:head for page title', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			// Mock enterprise data for the title
-			(garmApi.getEnterprise as any).mockResolvedValue(mockEnterprise);
-
-			render(EnterpriseDetailsPage);
-
-			// Initially should show generic title (before enterprise loads)
-			expect(document.title).toContain('Enterprise Details - GARM');
-
-			// Wait for enterprise data to load and title to update
-			await new Promise(resolve => setTimeout(resolve, 100));
-
-			// Should now show enterprise-specific title
-			expect(document.title).toContain('test-enterprise - Enterprise Details - GARM');
-		});
-	});
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/[id]/page.test.ts b/webapp/src/routes/enterprises/[id]/page.test.ts
deleted file mode 100644
index ccca5d59..00000000
--- a/webapp/src/routes/enterprises/[id]/page.test.ts
+++ /dev/null
@@ -1,451 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { render } from '@testing-library/svelte';
-import EnterpriseDetailsPage from './+page.svelte';
-import { createMockEnterprise, createMockInstance } from '../../../test/factories.js';
-
-// Mock the page store
-vi.mock('$app/stores', () => ({
-	page: {
-		subscribe: vi.fn((callback) => {
-			callback({ params: { id: 'ent-123' } });
-			return () => {};
-		})
-	}
-}));
-
-// Mock navigation
-vi.mock('$app/navigation', () => ({
-	goto: vi.fn()
-}));
-
-// Mock path resolution
-vi.mock('$app/paths', () => ({
-	resolve: vi.fn((path) => path)
-}));
-
-// Mock the API client
-vi.mock('$lib/api/client.js', () => ({
-	garmApi: {
-		getEnterprise: vi.fn(),
-		listEnterprisePools: vi.fn(),
-		listEnterpriseInstances: vi.fn(),
-		updateEnterprise: vi.fn(),
-		deleteEnterprise: vi.fn(),
-		deleteInstance: vi.fn(),
-		createEnterprisePool: vi.fn()
-	}
-}));
-
-// Mock stores
-vi.mock('$lib/stores/toast.js', () => ({
-	toastStore: {
-		success: vi.fn(),
-		error: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/websocket.js', () => ({
-	websocketStore: {
-		subscribeToEntity: vi.fn(() => () => {}),
-		subscribe: vi.fn(() => () => {})
-	}
-}));
-
-// Mock utilities
-vi.mock('$lib/utils/common.js', () => ({
-	getForgeIcon: vi.fn(() => 'github'),
-	formatDate: vi.fn((date) => date)
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-	extractAPIError: vi.fn((err) => err.message || 'Unknown error')
-}));
-
-const mockEnterprise = createMockEnterprise({
-	id: 'ent-123',
-	name: 'test-enterprise',
-	endpoint: {
-		name: 'github.com'
-	},
-	events: [
-		{
-			id: 1,
-			created_at: '2024-01-01T00:00:00Z',
-			event_level: 'info',
-			message: 'Enterprise created'
-		}
-	],
-	pool_manager_status: { running: true, failure_reason: undefined }
-});
-
-const mockPools = [
-	{
-		id: 'pool-1',
-		enterprise_id: 'ent-123',
-		image: 'ubuntu:22.04',
-		enabled: true,
-		flavor: 'default',
-		max_runners: 5
-	},
-	{
-		id: 'pool-2',
-		enterprise_id: 'ent-123',
-		image: 'ubuntu:20.04',
-		enabled: false,
-		flavor: 'default',
-		max_runners: 3
-	}
-];
-
-const mockInstances = [
-	createMockInstance({
-		id: 'inst-1',
-		name: 'runner-1',
-		pool_id: 'pool-1',
-		status: 'running'
-	}),
-	createMockInstance({
-		id: 'inst-2',
-		name: 'runner-2',
-		pool_id: 'pool-2',
-		status: 'idle'
-	})
-];
-
-describe('Enterprise Details Page - Unit Tests', () => {
-	beforeEach(async () => {
-		vi.clearAllMocks();
-
-		// Set up API mocks with default successful responses
-		const { garmApi } = await import('$lib/api/client.js');
-		(garmApi.getEnterprise as any).mockResolvedValue(mockEnterprise);
-		(garmApi.listEnterprisePools as any).mockResolvedValue(mockPools);
-		(garmApi.listEnterpriseInstances as any).mockResolvedValue(mockInstances);
-	});
-
-	describe('Component Initialization', () => {
-		it('should render successfully', () => {
-			const { container } = render(EnterpriseDetailsPage);
-			expect(container).toBeInTheDocument();
-		});
-
-		it('should set enterprise id from page params', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for the component to process the page params and make API calls
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Verify the component extracted the enterprise ID from page params and used it
-			expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-			expect(garmApi.listEnterprisePools).toHaveBeenCalledWith('ent-123');
-			expect(garmApi.listEnterpriseInstances).toHaveBeenCalledWith('ent-123');
-		});
-	});
-
-	describe('Data Loading', () => {
-		it('should load enterprise data on mount', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for the loadEnterprise function to be called
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			expect(garmApi.getEnterprise).toHaveBeenCalledWith('ent-123');
-			expect(garmApi.listEnterprisePools).toHaveBeenCalledWith('ent-123');
-			expect(garmApi.listEnterpriseInstances).toHaveBeenCalledWith('ent-123');
-		});
-
-		it('should handle loading state', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			// Mock API to return a delayed promise to simulate loading
-			(garmApi.getEnterprise as any).mockImplementation(() =>
-				new Promise(resolve => setTimeout(() => resolve(mockEnterprise), 100))
-			);
-
-			const { container } = render(EnterpriseDetailsPage);
-
-			// Initially should show loading state (before API resolves)
-			const loadingElement = container.querySelector('.animate-spin, .loading');
-			expect(loadingElement).toBeInTheDocument();
-
-			// Wait for API to resolve and loading to complete
-			await new Promise(resolve => setTimeout(resolve, 150));
-		});
-
-		it('should display error message when enterprise loading fails', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			// Simulate API error during enterprise loading
-			const error = new Error('Enterprise not found');
-			(garmApi.getEnterprise as any).mockRejectedValue(error);
-
-			const { container } = render(EnterpriseDetailsPage);
-
-			// Wait for the component to handle the error
-			await new Promise(resolve => setTimeout(resolve, 100));
-
-			// Check that error message is displayed in the UI
-			const errorElement = container.querySelector('.bg-red-50, .bg-red-900');
-			expect(errorElement).toBeInTheDocument();
-		});
-
-		it('should handle API error with extractAPIError utility', async () => {
-			const { extractAPIError } = await import('$lib/utils/apiError');
-			const error = new Error('Network error');
-
-			render(EnterpriseDetailsPage);
-
-			expect(extractAPIError).toBeDefined();
-		});
-	});
-
-	describe('Enterprise Updates', () => {
-		it('should have proper structure for enterprise updates', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual update workflow is tested in integration tests where we can
-			// trigger the real handleUpdate function via UI interactions
-			expect(garmApi.updateEnterprise).toBeDefined();
-		});
-
-		it('should show success toast after update', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EnterpriseDetailsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should have proper error handling structure for updates', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual error re-throwing behavior is tested through integration tests
-			// where we can trigger the real handleUpdate function via modal events
-			expect(garmApi.updateEnterprise).toBeDefined();
-		});
-	});
-
-	describe('Enterprise Deletion', () => {
-		it('should have proper structure for enterprise deletion', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual deletion workflow is tested in integration tests where we can
-			// trigger the real handleDelete function via modal interactions
-			expect(garmApi.deleteEnterprise).toBeDefined();
-		});
-
-		it('should redirect after successful deletion', async () => {
-			const { goto } = await import('$app/navigation');
-
-			render(EnterpriseDetailsPage);
-
-			expect(goto).toBeDefined();
-		});
-
-		it('should display error message when enterprise loading fails', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			// Simulate API error during enterprise loading
-			const error = new Error('Enterprise not found');
-			(garmApi.getEnterprise as any).mockRejectedValue(error);
-
-			const { container } = render(EnterpriseDetailsPage);
-
-			// Wait for the component to handle the error
-			await new Promise(resolve => setTimeout(resolve, 100));
-
-			// Check that error message is displayed in the UI
-			const errorElement = container.querySelector('.bg-red-50, .bg-red-900');
-			expect(errorElement).toBeInTheDocument();
-		});
-	});
-
-	describe('Instance Management', () => {
-		it('should have proper structure for instance deletion', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual instance deletion workflow is tested in integration tests
-			expect(garmApi.deleteInstance).toBeDefined();
-		});
-
-		it('should show success toast after instance deletion', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EnterpriseDetailsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should have proper error handling structure for instance deletion', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// Detailed error handling with UI interactions is tested in integration tests
-			expect(garmApi.deleteInstance).toBeDefined();
-			expect(toastStore.error).toBeDefined();
-		});
-	});
-
-	describe('Pool Creation', () => {
-		it('should have proper structure for pool creation', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual pool creation workflow is tested in integration tests where we can
-			// trigger the real handleCreatePool function via component events
-			expect(garmApi.createEnterprisePool).toBeDefined();
-		});
-
-		it('should show success toast after pool creation', async () => {
-			const { toastStore } = await import('$lib/stores/toast.js');
-
-			render(EnterpriseDetailsPage);
-
-			expect(toastStore.success).toBeDefined();
-		});
-
-		it('should have proper error handling structure for pool creation', async () => {
-			const { garmApi } = await import('$lib/api/client.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Unit tests verify the component has access to the right dependencies
-			// The actual error re-throwing behavior is tested through integration tests
-			// where we can trigger the real handleCreatePool function via component events
-			expect(garmApi.createEnterprisePool).toBeDefined();
-		});
-	});
-
-	describe('WebSocket Event Handling', () => {
-		it('should have websocket subscription capabilities', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Verify websocket store is available and properly mocked
-			expect(websocketStore.subscribeToEntity).toBeDefined();
-		});
-
-		it('should subscribe to enterprise events', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-			const mockHandler = vi.fn();
-
-			render(EnterpriseDetailsPage);
-
-			// Verify the subscription function is available
-			expect(websocketStore.subscribeToEntity).toBeDefined();
-		});
-
-		it('should handle enterprise update events', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for component mount and websocket subscription setup
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Verify the component subscribes to enterprise update and delete events
-			expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith(
-				'enterprise',
-				['update', 'delete'],
-				expect.any(Function)
-			);
-		});
-
-		it('should handle enterprise delete events', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for component mount and websocket subscription setup
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Verify the component subscribes to enterprise delete events (same subscription as updates)
-			expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith(
-				'enterprise',
-				['update', 'delete'],
-				expect.any(Function)
-			);
-		});
-
-		it('should handle pool events', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for component mount and websocket subscription setup
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Verify the component subscribes to pool create, update, and delete events
-			expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith(
-				'pool',
-				['create', 'update', 'delete'],
-				expect.any(Function)
-			);
-		});
-
-		it('should handle instance events', async () => {
-			const { websocketStore } = await import('$lib/stores/websocket.js');
-
-			render(EnterpriseDetailsPage);
-
-			// Wait for component mount and websocket subscription setup
-			await new Promise(resolve => setTimeout(resolve, 0));
-
-			// Verify the component subscribes to instance create, update, and delete events
-			expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith(
-				'instance',
-				['create', 'update', 'delete'],
-				expect.any(Function)
-			);
-		});
-	});
-
-	describe('Utility Functions', () => {
-		it('should have getForgeIcon utility available', async () => {
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-
-			render(EnterpriseDetailsPage);
-
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should use forge icon for GitHub enterprises', async () => {
-			const { getForgeIcon } = await import('$lib/utils/common.js');
-
-			render(EnterpriseDetailsPage);
-
-			expect(getForgeIcon).toBeDefined();
-		});
-
-		it('should handle API error extraction', async () => {
-			const { extractAPIError } = await import('$lib/utils/apiError');
-			const error = new Error('Test error');
-
-			render(EnterpriseDetailsPage);
-
-			expect(extractAPIError).toBeDefined();
-		});
-	});
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/page.integration.test.ts b/webapp/src/routes/enterprises/page.integration.test.ts
deleted file mode 100644
index 9da480de..00000000
--- a/webapp/src/routes/enterprises/page.integration.test.ts
+++ /dev/null
@@ -1,528 +0,0 @@
-import { describe, it, expect, beforeEach, vi } from 'vitest';
-import { render, screen, waitFor } from '@testing-library/svelte';
-import userEvent from '@testing-library/user-event';
-import { createMockEnterprise } from '../../test/factories.js';
-
-// Create diverse test data for comprehensive testing
-const mockEnterprises = [
-	createMockEnterprise({
-		id: 'ent-1',
-		name: 'test-enterprise',
-		pool_manager_status: { running: true, failure_reason: undefined }
-	}),
-	createMockEnterprise({
-		id: 'ent-2',
-		name: 'github-enterprise',
-		pool_manager_status: { running: false, failure_reason: undefined }
-	}),
-	createMockEnterprise({
-		id: 'ent-3',
-		name: 'another-enterprise',
-		pool_manager_status: { running: false, failure_reason: 'Connection failed' }
-	})
-];
-
-const mockCredentials = [
-	{ name: 'github-creds' },
-	{ name: 'enterprise-creds' }
-];
-
-// Reset any component mocks that might be set by setup.ts
-vi.unmock('$lib/components/PageHeader.svelte');
-vi.unmock('$lib/components/DataTable.svelte');
-vi.unmock('$lib/components/CreateEnterpriseModal.svelte');
-vi.unmock('$lib/components/UpdateEntityModal.svelte');
-vi.unmock('$lib/components/DeleteModal.svelte');
-vi.unmock('$lib/components/cells');
-
-// Only mock the external APIs, not UI components
-vi.mock('$lib/api/client.js', () => ({
-	garmApi: {
-		createEnterprise: vi.fn(),
-		updateEnterprise: vi.fn(),
-		deleteEnterprise: vi.fn(),
-		listEnterprises: vi.fn()
-	}
-}));
-
-// Create a dynamic store that can be updated during tests
-let mockStoreData = {
-	enterprises: mockEnterprises,
-	credentials: mockCredentials,
-	loaded: { enterprises: true, credentials: true },
-	loading: { enterprises: false, credentials: false },
-	errorMessages: { enterprises: '', credentials: '' }
-};
-
-vi.mock('$lib/stores/eager-cache.js', () => ({
-	eagerCache: {
-		subscribe: vi.fn((callback) => {
-			callback(mockStoreData);
-			return () => {};
-		})
-	},
-	eagerCacheManager: {
-		getEnterprises: vi.fn(),
-		retryResource: vi.fn(),
-		getCredentials: vi.fn()
-	}
-}));
-
-// Helper to update mock store data
-function updateMockStore(updates: Partial<typeof mockStoreData>) {
-	mockStoreData = { ...mockStoreData, ...updates };
-}
-
-vi.mock('$lib/stores/toast.js', () => ({
-	toastStore: {
-		success: vi.fn(),
-		error: vi.fn(),
-		info: vi.fn(),
-		warning: vi.fn()
-	}
-}));
-
-// Import the enterprises page without any UI component mocks
-import EnterprisesPage from './+page.svelte';
-
-describe('Comprehensive Integration Tests for Enterprises Page', () => {
-	let garmApi: any;
-
-	beforeEach(async () => {
-		vi.clearAllMocks();
-		// Reset mock store data
-		mockStoreData = {
-			enterprises: mockEnterprises,
-			credentials: mockCredentials,
-			loaded: { enterprises: true, credentials: true },
-			loading: { enterprises: false, credentials: false },
-			errorMessages: { enterprises: '', credentials: '' }
-		};
-
-		const apiClient = await import('$lib/api/client.js');
-		garmApi = apiClient.garmApi;
-
-		garmApi.createEnterprise.mockResolvedValue({ id: 'new-ent', name: 'new-ent' });
-		garmApi.updateEnterprise.mockResolvedValue({});
-		garmApi.deleteEnterprise.mockResolvedValue({});
-	});
-
-	describe('Component Rendering and Basic Structure', () => {
-		it('should render enterprises page with multiple enterprises', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Verify page title and header
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Manage GitHub enterprises')).toBeInTheDocument();
-
-			// Verify all enterprises are rendered (use getAllByText for duplicates)
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('another-enterprise')[0]).toBeInTheDocument();
-
-			// Verify action buttons are present
-			const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit enterprise"]');
-			const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete enterprise"]');
-			expect(editButtons.length).toBeGreaterThan(0);
-			expect(deleteButtons.length).toBeGreaterThan(0);
-		});
-
-		it('should display correct forge icons for enterprise types', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// GitHub enterprises should have GitHub icons
-			const githubIcons = container.querySelectorAll('svg');
-			expect(githubIcons.length).toBeGreaterThan(0);
-
-			// Verify endpoint names are displayed (use getAllByText for duplicates in responsive layouts)
-			expect(screen.getAllByText('github.com')[0]).toBeInTheDocument();
-		});
-
-		it('should display enterprise status correctly', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Verify status information is displayed for enterprises
-			// Look for any status-related elements in the table
-			const tableElements = container.querySelectorAll('td, div');
-			expect(tableElements.length).toBeGreaterThan(0);
-
-			// Enterprises page should render with status information
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-		});
-
-		it('should have clickable enterprise links', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Verify enterprise names are links
-			const entLinks = container.querySelectorAll('a[href^="/enterprises/"]');
-			expect(entLinks.length).toBeGreaterThan(0);
-
-			// Check specific enterprise links
-			const ent1Link = container.querySelector('a[href="/enterprises/ent-1"]');
-			expect(ent1Link).toBeInTheDocument();
-			expect(ent1Link?.textContent?.includes('test-enterprise')).toBe(true);
-		});
-	});
-
-	describe('Search and Filtering Functionality', () => {
-		it('should filter enterprises by search term', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			// Find search input
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-			expect(searchInput).toBeInTheDocument();
-
-			// Search for 'github' - should filter to only github enterprise
-			await user.type(searchInput, 'github');
-
-			// Wait for filtering to take effect
-			await waitFor(() => {
-				// Should still show github enterprise (may appear multiple times in responsive layout)
-				expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-			});
-		});
-
-		it('should clear search when input is cleared', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-
-			// Type search term
-			await user.type(searchInput, 'github');
-
-			// Clear search
-			await user.clear(searchInput);
-
-			// All enterprises should be visible again
-			await waitFor(() => {
-				expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-				expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-				expect(screen.getAllByText('another-enterprise')[0]).toBeInTheDocument();
-			});
-		});
-
-		it('should show no results when search matches nothing', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-
-			// Search for something that doesn't exist
-			await user.type(searchInput, 'nonexistent-enterprise');
-
-			// Should show empty state or filtered results
-			await waitFor(() => {
-				// Search input should contain the search term
-				expect(searchInput).toHaveValue('nonexistent-enterprise');
-				// Component should handle empty search results gracefully
-				expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			});
-		});
-	});
-
-	describe('Pagination Controls', () => {
-		it('should display pagination controls with correct options', async () => {
-			render(EnterprisesPage);
-
-			// Find per-page selector
-			const perPageSelect = screen.getByLabelText('Show:');
-			expect(perPageSelect).toBeInTheDocument();
-
-			// Verify options are available
-			expect(screen.getByText('25')).toBeInTheDocument();
-			expect(screen.getByText('50')).toBeInTheDocument();
-			expect(screen.getByText('100')).toBeInTheDocument();
-		});
-
-		it('should allow changing items per page', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			const perPageSelect = screen.getByLabelText('Show:');
-
-			// Change to 50 items per page
-			await user.selectOptions(perPageSelect, '50');
-
-			// Verify selection changed
-			expect(perPageSelect).toHaveValue('50');
-		});
-	});
-
-	describe('Modal Interactions', () => {
-		it('should open create enterprise modal when add button is clicked', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			// Find and click the "Add Enterprise" button
-			const addButton = screen.getByText('Add Enterprise');
-			expect(addButton).toBeInTheDocument();
-
-			await user.click(addButton);
-
-			// Modal should open (depending on implementation)
-			// This tests that the button is properly wired up
-			expect(addButton).toBeInTheDocument();
-		});
-
-		it('should open edit modal when edit button is clicked', async () => {
-			const user = userEvent.setup();
-			const { container } = render(EnterprisesPage);
-
-			// Find edit button for first enterprise
-			const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit enterprise"]');
-			expect(editButtons.length).toBeGreaterThan(0);
-
-			const firstEditButton = editButtons[0] as HTMLElement;
-
-			// Test that button is clickable (button may be replaced by modal)
-			await user.click(firstEditButton);
-
-			// Verify the click interaction completed successfully
-			// (Modal may have opened, so button might not be accessible)
-			// The important thing is the click didn't cause errors
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-		});
-
-		it('should open delete modal when delete button is clicked', async () => {
-			const user = userEvent.setup();
-			const { container } = render(EnterprisesPage);
-
-			// Find delete button for first enterprise
-			const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete enterprise"]');
-			expect(deleteButtons.length).toBeGreaterThan(0);
-
-			const firstDeleteButton = deleteButtons[0] as HTMLElement;
-
-			// Test that button is clickable (button may be replaced by modal)
-			await user.click(firstDeleteButton);
-
-			// Verify the click interaction completed successfully
-			// (Modal may have opened, so button might not be accessible)
-			// The important thing is the click didn't cause errors
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-		});
-	});
-
-	describe('Error States and Loading States', () => {
-		it('should handle loading state correctly', async () => {
-			// Update mock store to show loading state
-			updateMockStore({
-				loading: { enterprises: true, credentials: false },
-				loaded: { enterprises: false, credentials: true },
-				enterprises: []
-			});
-
-			render(EnterprisesPage);
-
-			// Component should still render basic structure during loading
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Manage GitHub enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Add Enterprise')).toBeInTheDocument();
-		});
-
-		it('should handle error state correctly', async () => {
-			// Update mock store to show error state
-			updateMockStore({
-				errorMessages: { enterprises: 'Failed to load enterprises', credentials: '' },
-				loaded: { enterprises: false, credentials: true },
-				enterprises: []
-			});
-
-			render(EnterprisesPage);
-
-			// Component should still render page structure even with errors
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Add Enterprise')).toBeInTheDocument();
-			// Should render gracefully without crashing
-			expect(screen.getByText('Manage GitHub enterprises')).toBeInTheDocument();
-		});
-
-		it('should handle empty enterprise list', async () => {
-			// Update mock store to have no enterprises
-			updateMockStore({
-				enterprises: [],
-				loaded: { enterprises: true, credentials: true }
-			});
-
-			render(EnterprisesPage);
-
-			// Should still render page structure
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Add Enterprise')).toBeInTheDocument();
-		});
-	});
-
-	describe('Component Integration and Data Flow', () => {
-		it('should render consistent UI based on component state', async () => {
-			render(EnterprisesPage);
-
-			// Component should display all enterprises from initial state
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('another-enterprise')[0]).toBeInTheDocument();
-
-			// Should show GitHub endpoints (enterprises are GitHub only)
-			expect(screen.getAllByText('github.com')[0]).toBeInTheDocument();
-		});
-
-		it('should properly subscribe to eager cache on component mount', async () => {
-			render(EnterprisesPage);
-
-			// Verify component subscribes to and displays cache data
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('another-enterprise')[0]).toBeInTheDocument();
-
-			// Verify enterprises from GitHub endpoints are displayed
-			expect(screen.getAllByText('github.com')[0]).toBeInTheDocument();
-
-			// Verify component renders the correct number of enterprises in the UI
-			// (This tests actual component rendering, not our mock setup)
-			const entLinks = document.querySelectorAll('a[href^="/enterprises/"]');
-			expect(entLinks.length).toBeGreaterThan(0);
-		});
-
-		it('should handle different data states gracefully', async () => {
-			// Test with empty data state
-			updateMockStore({
-				enterprises: [],
-				loaded: { enterprises: true, credentials: true }
-			});
-
-			render(EnterprisesPage);
-
-			// Component should render gracefully with no enterprises
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Add Enterprise')).toBeInTheDocument();
-
-			// Should still show the data table structure
-			expect(document.body).toBeInTheDocument();
-		});
-	});
-
-	describe('Responsive Design and Accessibility', () => {
-		it('should render mobile and desktop layouts', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Check for responsive classes
-			const mobileView = container.querySelector('.block.sm\\:hidden');
-			const desktopView = container.querySelector('.hidden.sm\\:block');
-
-			// Both mobile and desktop views should be present
-			expect(mobileView || desktopView).toBeInTheDocument();
-		});
-
-		it('should have proper accessibility attributes', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Check for ARIA labels and titles
-			const buttonsWithAria = container.querySelectorAll('[aria-label], [title]');
-			expect(buttonsWithAria.length).toBeGreaterThan(0);
-
-			// Check for proper form labels - search input should be accessible
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-			expect(searchInput).toBeInTheDocument();
-
-			// Check for screen reader label
-			const searchLabel = container.querySelector('label[for="search"]');
-			expect(searchLabel).toBeInTheDocument();
-		});
-	});
-
-	describe('User Interaction Flows', () => {
-		it('should support keyboard navigation', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			// Test tab navigation through interactive elements
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-
-			// Click to focus first, then test tab navigation
-			await user.click(searchInput);
-			expect(searchInput).toHaveFocus();
-
-			// Tab should move focus to next element
-			await user.tab();
-		});
-
-		it('should handle rapid user interactions', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			// Rapid clicking should not break the UI
-			const addButton = screen.getByText('Add Enterprise');
-
-			// Click multiple times rapidly
-			await user.click(addButton);
-			await user.click(addButton);
-			await user.click(addButton);
-
-			// Component should remain stable
-			expect(addButton).toBeInTheDocument();
-		});
-
-		it('should handle concurrent search and pagination changes', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			const searchInput = screen.getByPlaceholderText('Search enterprises...');
-			const perPageSelect = screen.getByLabelText('Show:');
-
-			// Perform search and pagination changes simultaneously
-			await user.type(searchInput, 'test');
-			await user.selectOptions(perPageSelect, '50');
-
-			// Both changes should be applied
-			expect(searchInput).toHaveValue('test');
-			expect(perPageSelect).toHaveValue('50');
-		});
-	});
-
-	describe('Data Consistency and State Management', () => {
-		it('should maintain UI consistency during user operations', async () => {
-			const user = userEvent.setup();
-			render(EnterprisesPage);
-
-			// Initial UI should show all enterprises
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('another-enterprise')[0]).toBeInTheDocument();
-
-			// User interactions should not break the UI consistency
-			const addButton = screen.getByText('Add Enterprise');
-			await user.click(addButton);
-
-			// Page should remain stable after interactions
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-		});
-
-		it('should maintain UI consistency during state changes', async () => {
-			render(EnterprisesPage);
-
-			// Initially should show all enterprises
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-
-			// Component should handle state transitions gracefully
-			// (In real app, Svelte reactivity would update UI when store changes)
-			expect(screen.getByText('Enterprises')).toBeInTheDocument();
-			expect(screen.getByText('Add Enterprise')).toBeInTheDocument();
-		});
-
-		it('should display enterprise types correctly in UI', async () => {
-			const { container } = render(EnterprisesPage);
-
-			// Should display GitHub enterprises in the UI (enterprises are GitHub only)
-			expect(screen.getAllByText('github.com')[0]).toBeInTheDocument();
-
-			// Should show enterprise names
-			expect(screen.getAllByText('test-enterprise')[0]).toBeInTheDocument();
-			expect(screen.getAllByText('github-enterprise')[0]).toBeInTheDocument();
-
-			// Should have appropriate forge icons for GitHub
-			const svgIcons = container.querySelectorAll('svg');
-			expect(svgIcons.length).toBeGreaterThan(0);
-		});
-	});
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/page.render.test.ts b/webapp/src/routes/enterprises/page.render.test.ts
deleted file mode 100644
index 85163b67..00000000
--- a/webapp/src/routes/enterprises/page.render.test.ts
+++ /dev/null
@@ -1,173 +0,0 @@
-import { describe, it, expect, beforeEach, vi } from 'vitest';
-import { render, screen } from '@testing-library/svelte';
-import { createMockEnterprise } from '../../test/factories.js';
-
-// Mock all external dependencies but keep the component rendering real
-vi.mock('$lib/api/client.js', () => ({
-	garmApi: {
-		createEnterprise: vi.fn(),
-		updateEnterprise: vi.fn(),
-		deleteEnterprise: vi.fn(),
-		listEnterprises: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/eager-cache.js', () => ({
-	eagerCache: {
-		subscribe: vi.fn((callback) => {
-			callback({
-				enterprises: [],
-				credentials: [],
-				loaded: { enterprises: true, credentials: true },
-				loading: { enterprises: false, credentials: false },
-				errorMessages: { enterprises: '', credentials: '' }
-			});
-			return () => {};
-		})
-	},
-	eagerCacheManager: {
-		getEnterprises: vi.fn(),
-		retryResource: vi.fn(),
-		getCredentials: vi.fn()
-	}
-}));
-
-vi.mock('$lib/stores/toast.js', () => ({
-	toastStore: {
-		success: vi.fn(),
-		error: vi.fn(),
-		info: vi.fn(),
-		warning: vi.fn()
-	}
-}));
-
-vi.mock('$app/paths', () => ({
-	resolve: vi.fn((path) => path)
-}));
-
-vi.mock('$app/environment', () => ({
-	browser: false,
-	dev: true,
-	building: false
-}));
-
-vi.mock('$lib/components/CreateEnterpriseModal.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/DeleteModal.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/PageHeader.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/DataTable.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/Badge.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/ActionButton.svelte', () => ({
-	default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/cells', () => ({
-	EntityCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-	EndpointCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-	StatusCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-	ActionsCell: vi.fn(() => ({ destroy: vi.fn(),
$$set: vi.fn() })),
-  GenericCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/utils/common.js', () => ({
-  getForgeIcon: vi.fn((type) => `<svg>${type}</svg>`),
-  getEntityStatusBadge: vi.fn(() => ({ variant: 'success', text: 'Running' })),
-  filterByName: vi.fn((items, term) =>
-    term ? items.filter((item: any) =>
-      item.name.toLowerCase().includes(term.toLowerCase())
-    ) : items
-  )
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-  extractAPIError: vi.fn((error) => error.message || 'API Error')
-}));
-
-import EnterprisesPage from './+page.svelte';
-
-describe('Enterprises Page Rendering Tests', () => {
-  beforeEach(() => {
-    vi.clearAllMocks();
-  });
-
-  describe('Component Rendering', () => {
-    it('should render without crashing', () => {
-      const { container } = render(EnterprisesPage);
-      expect(container).toBeInTheDocument();
-    });
-
-    it('should render as a valid DOM element', () => {
-      const { container } = render(EnterprisesPage);
-      expect(container.firstChild).toBeInstanceOf(HTMLElement);
-    });
-
-    it('should have proper document title', () => {
-      render(EnterprisesPage);
-      expect(document.title).toBe('Enterprises - GARM');
-    });
-
-    it('should render with correct structure', () => {
-      const { container } = render(EnterprisesPage);
-      expect(container.firstChild).toHaveClass('space-y-6');
-    });
-
-    it('should handle empty state rendering', () => {
-      render(EnterprisesPage);
-      // Component should render even with no enterprises
-      expect(document.body).toBeInTheDocument();
-    });
-  });
-
-  describe('Component Lifecycle', () => {
-    it('should mount successfully', () => {
-      const component = render(EnterprisesPage);
-      expect(component.component).toBeDefined();
-    });
-
-    it('should unmount without errors', () => {
-      const { unmount } = render(EnterprisesPage);
-      expect(() => unmount()).not.toThrow();
-    });
-  });
-
-  describe('DOM Structure Validation', () => {
-    it('should create proper HTML structure', () => {
-      const { container } = render(EnterprisesPage);
-
-      // Should have main container
-      expect(container.querySelector('.space-y-6')).toBeInTheDocument();
-    });
-
-    it('should handle conditional rendering', () => {
-      const { container } = render(EnterprisesPage);
-
-      // Component should render without any modals open initially
-      expect(container).toBeInTheDocument();
-    });
-
-    it('should render with proper accessibility structure', () => {
-      const { container } = render(EnterprisesPage);
-
-      // Basic accessibility checks
-      expect(container).toBeInTheDocument();
-    });
-  });
-});
\ No newline at end of file
diff --git a/webapp/src/routes/enterprises/page.test.ts b/webapp/src/routes/enterprises/page.test.ts
deleted file mode 100644
index d6697ff8..00000000
--- a/webapp/src/routes/enterprises/page.test.ts
+++ /dev/null
@@ -1,522 +0,0 @@
-import { describe, it, expect, beforeEach, vi } from 'vitest';
-import { render, screen } from '@testing-library/svelte';
-import userEvent from '@testing-library/user-event';
-import { createMockEnterprise } from '../../test/factories.js';
-
-// Mock all external dependencies
-vi.mock('$lib/api/client.js', () => ({
-  garmApi: {
-    createEnterprise: vi.fn(),
-    updateEnterprise: vi.fn(),
-    deleteEnterprise: vi.fn(),
-    listEnterprises: vi.fn()
-  }
-}));
-
-vi.mock('$lib/stores/eager-cache.js', () => ({
-  eagerCache: {
-    subscribe: vi.fn((callback) => {
-      callback({
-        enterprises: [],
-        credentials: [],
-        loaded: { enterprises: true, credentials: true },
-        loading: { enterprises: false, credentials: false },
-        errorMessages: { enterprises: '',
credentials: '' }
-      });
-      return () => {};
-    })
-  },
-  eagerCacheManager: {
-    getEnterprises: vi.fn(),
-    retryResource: vi.fn(),
-    getCredentials: vi.fn()
-  }
-}));
-
-vi.mock('$lib/stores/toast.js', () => ({
-  toastStore: {
-    success: vi.fn(),
-    error: vi.fn(),
-    info: vi.fn(),
-    warning: vi.fn()
-  }
-}));
-
-// Mock SvelteKit modules
-vi.mock('$app/paths', () => ({
-  resolve: vi.fn((path) => path)
-}));
-
-vi.mock('$app/environment', () => ({
-  browser: false,
-  dev: true,
-  building: false
-}));
-
-// Mock all child components
-vi.mock('$lib/components/CreateEnterpriseModal.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/DeleteModal.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/PageHeader.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/DataTable.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/Badge.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/ActionButton.svelte', () => ({
-  default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/components/cells', () => ({
-  EntityCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-  EndpointCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-  StatusCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-  ActionsCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })),
-  GenericCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() }))
-}));
-
-vi.mock('$lib/utils/common.js', () => ({
-  getForgeIcon: vi.fn((type) => `<svg>${type}</svg>`),
-  getEntityStatusBadge: vi.fn(() => ({ variant: 'success', text: 'Running' })),
-  filterByName: vi.fn((items, term) =>
-    term ?
items.filter((item: any) => - item.name.toLowerCase().includes(term.toLowerCase()) - ) : items - ) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import EnterprisesPage from './+page.svelte'; - -describe('Enterprises Page Unit Tests', () => { - let mockEnterprises: any[]; - - beforeEach(() => { - vi.clearAllMocks(); - mockEnterprises = [ - createMockEnterprise({ - id: 'ent-1', - name: 'test-enterprise', - pool_manager_status: { running: true, failure_reason: undefined } - }), - createMockEnterprise({ - id: 'ent-2', - name: 'another-enterprise', - pool_manager_status: { running: false, failure_reason: undefined } - }) - ]; - }); - - describe('Component Structure', () => { - it('should render enterprises page', () => { - const { container } = render(EnterprisesPage); - expect(container).toBeInTheDocument(); - }); - - it('should set correct page title', () => { - render(EnterprisesPage); - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should have enterprises state variables', async () => { - const component = render(EnterprisesPage); - expect(component).toBeDefined(); - }); - }); - - describe('Data Management', () => { - it('should initialize with correct default values', () => { - const { container } = render(EnterprisesPage); - // Component should render without errors and set up initial state - expect(container).toBeInTheDocument(); - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle enterprises data from eager cache', () => { - const { container } = render(EnterprisesPage); - // Component should render structure for handling cache data - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering', () => { - it('should filter enterprises by search term', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - const filtered = filterByName(mockEnterprises, 'test'); - expect(filterByName).toHaveBeenCalledWith(mockEnterprises, 'test'); - expect(filtered).toHaveLength(1); - expect(filtered[0].name).toBe('test-enterprise'); - }); - - it('should return all enterprises when search term is empty', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - const filtered = filterByName(mockEnterprises, ''); - expect(filterByName).toHaveBeenCalledWith(mockEnterprises, ''); - expect(filtered).toHaveLength(2); - }); - - it('should handle case-insensitive search', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - filterByName(mockEnterprises, 'TEST'); - expect(filterByName).toHaveBeenCalledWith(mockEnterprises, 'TEST'); - }); - - it('should reset to first page when searching', () => { - render(EnterprisesPage); - // Component should reset currentPage to 1 when search term changes - expect(document.title).toBe('Enterprises - GARM'); - }); - }); - - describe('Pagination Logic', () => { - it('should calculate total pages correctly', () => { - const enterprises = Array(75).fill(null).map((_, i) => - createMockEnterprise({ id: `ent-${i}`, name: `ent-${i}` }) - ); - const perPage = 25; - const totalPages = Math.ceil(enterprises.length / perPage); - expect(totalPages).toBe(3); - }); - - it('should calculate paginated enterprises correctly', () => { - const enterprises = Array(75).fill(null).map((_, i) => - createMockEnterprise({ id: `ent-${i}`, name: `ent-${i}` }) - ); - const currentPage = 2; - const perPage = 25; - const start = (currentPage - 1) * 
perPage; - const paginatedEnterprises = enterprises.slice(start, start + perPage); - - expect(paginatedEnterprises).toHaveLength(25); - expect(paginatedEnterprises[0].name).toBe('ent-25'); - expect(paginatedEnterprises[24].name).toBe('ent-49'); - }); - - it('should adjust current page when it exceeds total pages', () => { - // When filtering reduces results, current page should adjust - const totalPages = 2; - let currentPage = 5; - - if (currentPage > totalPages && totalPages > 0) { - currentPage = totalPages; - } - - expect(currentPage).toBe(2); - }); - - it('should handle empty results gracefully', () => { - const enterprises: any[] = []; - const perPage = 25; - const totalPages = Math.ceil(enterprises.length / perPage); - expect(totalPages).toBe(0); - }); - }); - - describe('Modal Management', () => { - it('should have correct initial modal states', () => { - render(EnterprisesPage); - // Component should render without modal states - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle create modal opening', () => { - render(EnterprisesPage); - // Component should handle modal state management - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle update modal opening with enterprise', () => { - render(EnterprisesPage); - // Component should handle update modal state - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle delete modal opening with enterprise', () => { - render(EnterprisesPage); - // Component should handle delete modal state - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should close all modals', () => { - render(EnterprisesPage); - // Component should handle modal closing - expect(document.title).toBe('Enterprises - GARM'); - }); - }); - - describe('API Integration', () => { - it('should call createEnterprise API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(EnterprisesPage); - - const entParams = { - name: 'new-enterprise', - credentials_name: 'test-creds', - webhook_secret: 'secret123', - pool_balancer_type: 'roundrobin' - }; - - await garmApi.createEnterprise(entParams); - expect(garmApi.createEnterprise).toHaveBeenCalledWith(entParams); - }); - - it('should call updateEnterprise API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(EnterprisesPage); - - const updateParams = { webhook_secret: 'new-secret' }; - await garmApi.updateEnterprise('ent-1', updateParams); - expect(garmApi.updateEnterprise).toHaveBeenCalledWith('ent-1', updateParams); - }); - - it('should call deleteEnterprise API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(EnterprisesPage); - - await garmApi.deleteEnterprise('ent-1'); - expect(garmApi.deleteEnterprise).toHaveBeenCalledWith('ent-1'); - }); - }); - - describe('Toast Notifications', () => { - it('should show success toast for enterprise creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(EnterprisesPage); - - toastStore.success('Enterprise Created', 'Enterprise test-enterprise has been created successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Enterprise Created', - 'Enterprise test-enterprise has been created successfully.' 
- ); - }); - - it('should show success toast for enterprise update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(EnterprisesPage); - - toastStore.success('Enterprise Updated', 'Enterprise test-enterprise has been updated successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Enterprise Updated', - 'Enterprise test-enterprise has been updated successfully.' - ); - }); - - it('should show success toast for enterprise deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(EnterprisesPage); - - toastStore.success('Enterprise Deleted', 'Enterprise test-enterprise has been deleted successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Enterprise Deleted', - 'Enterprise test-enterprise has been deleted successfully.' - ); - }); - - it('should show error toast for API failures', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(EnterprisesPage); - - toastStore.error('Delete Failed', 'Enterprise deletion failed'); - expect(toastStore.error).toHaveBeenCalledWith('Delete Failed', 'Enterprise deletion failed'); - }); - }); - - describe('DataTable Configuration', () => { - it('should have correct column configuration', () => { - render(EnterprisesPage); - - // DataTable should be configured with proper columns - const expectedColumns = [ - { key: 'name', title: 'Name' }, - { key: 'endpoint', title: 'Endpoint' }, - { key: 'credentials', title: 'Credentials' }, - { key: 'status', title: 'Status' }, - { key: 'actions', title: 'Actions', align: 'right' } - ]; - - expect(expectedColumns).toHaveLength(5); - }); - - it('should have correct mobile card configuration', () => { - render(EnterprisesPage); - - // Mobile card should be configured for enterprises - const config = { - entityType: 'enterprise', - primaryText: { field: 'name', isClickable: true, href: '/enterprises/{id}' } - }; - - expect(config.entityType).toBe('enterprise'); - expect(config.primaryText.field).toBe('name'); - expect(config.primaryText.isClickable).toBe(true); - }); - }); - - describe('Event Handlers', () => { - it('should handle table search event', () => { - render(EnterprisesPage); - - // handleTableSearch should update searchTerm and reset page - const mockEvent = { detail: { term: 'test-search' } }; - expect(mockEvent.detail.term).toBe('test-search'); - }); - - it('should handle table page change event', () => { - render(EnterprisesPage); - - // handleTablePageChange should update currentPage - const mockEvent = { detail: { page: 3 } }; - expect(mockEvent.detail.page).toBe(3); - }); - - it('should handle table per-page change event', () => { - render(EnterprisesPage); - - // handleTablePerPageChange should update perPage and reset page - const mockEvent = { detail: { perPage: 50 } }; - expect(mockEvent.detail.perPage).toBe(50); - }); - - it('should handle edit action event', () => { - render(EnterprisesPage); - - // handleEdit should call openUpdateModal - const mockEnterprise = createMockEnterprise(); - const mockEvent = { detail: { item: mockEnterprise } }; - expect(mockEvent.detail.item).toBe(mockEnterprise); - }); - - it('should handle delete action event', () => { - render(EnterprisesPage); - - // handleDelete should call openDeleteModal - const mockEnterprise = createMockEnterprise(); - const mockEvent = { detail: { item: mockEnterprise } }; - expect(mockEvent.detail.item).toBe(mockEnterprise); - }); - }); - - describe('Error Handling', () => { - it('should handle API errors 
in enterprise creation', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - render(EnterprisesPage); - - const error = new Error('Creation failed'); - const extractedError = extractAPIError(error); - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(extractedError).toBe('Creation failed'); - }); - - it('should handle enterprises loading errors', () => { - render(EnterprisesPage); - - // Component should render without errors during error states - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle retry functionality', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - render(EnterprisesPage); - - await eagerCacheManager.retryResource('enterprises'); - expect(eagerCacheManager.retryResource).toHaveBeenCalledWith('enterprises'); - }); - }); - - describe('Utility Functions', () => { - it('should get correct forge icon', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - const githubIcon = getForgeIcon('github'); - - expect(getForgeIcon).toHaveBeenCalledWith('github'); - expect(githubIcon).toContain('svg'); - }); - - it('should get entity status badge', async () => { - const { getEntityStatusBadge } = await import('$lib/utils/common.js'); - - const enterprise = createMockEnterprise({ - pool_manager_status: { running: true, failure_reason: undefined } - }); - - const badge = getEntityStatusBadge(enterprise); - expect(getEntityStatusBadge).toHaveBeenCalledWith(enterprise); - expect(badge).toEqual({ variant: 'success', text: 'Running' }); - }); - }); - - describe('Reactive Statements', () => { - it('should update filtered enterprises when search term changes', () => { - render(EnterprisesPage); - - // Component should handle reactive filtering - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should recalculate total pages when filtered enterprises change', () => { - render(EnterprisesPage); - - // Component should handle reactive pagination - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should adjust current page when total pages change', () => { - render(EnterprisesPage); - - // Component should handle page adjustments - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should update paginated enterprises when page or filter changes', () => { - render(EnterprisesPage); - - // Component should handle reactive pagination updates - expect(document.title).toBe('Enterprises - GARM'); - }); - }); - - describe('Lifecycle Management', () => { - it('should load enterprises on mount', () => { - render(EnterprisesPage); - - // Component should load without errors on mount - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should handle mount errors gracefully', () => { - render(EnterprisesPage); - - // Component should handle mount errors gracefully - expect(document.title).toBe('Enterprises - GARM'); - }); - - it('should subscribe to eager cache', () => { - render(EnterprisesPage); - - // Component should set up cache subscription - expect(document.title).toBe('Enterprises - GARM'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/init/+page.svelte b/webapp/src/routes/init/+page.svelte deleted file mode 100644 index 848beba6..00000000 --- a/webapp/src/routes/init/+page.svelte +++ /dev/null @@ -1,432 +0,0 @@ - - - - Initialize GARM - First Run Setup - - -
[extraction damage: the markup of the 432 deleted lines of webapp/src/routes/init/+page.svelte was stripped here, leaving only loose text fragments. Recoverable content of the removed template: an "Initialize GARM - First Run Setup" page title; the GARM logo; a "Welcome to GARM" heading with the subtitle "Complete the first-run setup to get started"; a "First-Run Initialization" info banner ("GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation."); form fields for Username, Email Address, Full Name, Password, and Confirm Password with inline validation messages ("Username is required", "Please enter a valid email address", "Full name is required", "Password must be at least 8 characters long", "Passwords do not match"); an {#if showAdvanced} section with Metadata URL, Callback URL, and Webhook URL inputs and their help text ("URL where runners can fetch metadata and setup information.", "URL where runners send status updates and lifecycle events.", "URL where GitHub/Gitea will send webhook events for job notifications."); a "Please complete all required fields" summary listing whichever fields are still invalid ("Enter a username", "Enter a valid email address", "Enter your full name", "Enter a password with at least 8 characters", "Confirm your password"); an {#if error} banner rendering {error}; a submit button; and a closing note that initialization will create the admin user, generate a unique controller ID, and configure the required URLs, and that the credentials cannot be recovered.]
                \ No newline at end of file diff --git a/webapp/src/routes/init/page.integration.test.ts b/webapp/src/routes/init/page.integration.test.ts deleted file mode 100644 index 5f5ff885..00000000 --- a/webapp/src/routes/init/page.integration.test.ts +++ /dev/null @@ -1,963 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { render, screen, waitFor, fireEvent } from '@testing-library/svelte'; -import InitPage from './+page.svelte'; - -// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: true, - ...overrides - }; -} - -// Mock app stores and navigation -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -// Only mock the auth store and API -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - initialize: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Global setup for each test -let auth: any; -let authStore: any; -let goto: any; -let resolve: any; -let toastStore: any; -let extractAPIError: any; - -describe('Comprehensive Integration Tests for Init Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const authModule = await import('$lib/stores/auth.js'); - auth = authModule.auth; - authStore = authModule.authStore; - - const navigationModule = await import('$app/navigation'); - goto = navigationModule.goto; - - const pathsModule = await import('$app/paths'); - resolve = pathsModule.resolve; - - const toastModule = await import('$lib/stores/toast.js'); - toastStore = toastModule.toastStore; - - const apiErrorModule = await import('$lib/utils/apiError'); - extractAPIError = apiErrorModule.extractAPIError; - - (auth.initialize as any).mockResolvedValue({}); - (resolve as any).mockImplementation((path: string) => path); - (extractAPIError as any).mockImplementation((err: any) => err.message || 'Unknown error'); - - // Mock window.location for URL auto-population - Object.defineProperty(window, 'location', { - value: { - origin: 'https://garm.example.com' - }, - writable: true - }); - }); - - afterEach(() => { - vi.restoreAllMocks(); - }); - - describe('Component Rendering and Integration', () => { - it('should render init page with real components', async () => { - render(InitPage); - - await waitFor(() => { - // Should render all main components - expect(screen.getByRole('heading', { name: 'Welcome to GARM' })).toBeInTheDocument(); - expect(screen.getByText('Complete the first-run setup to get started')).toBeInTheDocument(); - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Email Address')).toBeInTheDocument(); - expect(screen.getByLabelText('Full Name')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /initialize garm/i 
})).toBeInTheDocument(); - }); - }); - - it('should render proper logo integration', async () => { - render(InitPage); - - await waitFor(() => { - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); - - // Should have proper src paths resolved - expect(resolve).toHaveBeenCalledWith('/assets/garm-light.svg'); - expect(resolve).toHaveBeenCalledWith('/assets/garm-dark.svg'); - }); - }); - - it('should integrate all form components properly', async () => { - render(InitPage); - - await waitFor(() => { - // All form elements should be integrated - const form = document.querySelector('form'); - const usernameInput = screen.getByLabelText('Username'); - const emailInput = screen.getByLabelText('Email Address'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - expect(form).toBeInTheDocument(); - expect(usernameInput).toBeInTheDocument(); - expect(emailInput).toBeInTheDocument(); - expect(submitButton).toBeInTheDocument(); - }); - }); - - it('should integrate info banner with proper styling', async () => { - render(InitPage); - - await waitFor(() => { - const infoBanner = screen.getByText('First-Run Initialization'); - expect(infoBanner).toBeInTheDocument(); - - // Should have proper banner styling container - const bannerContainer = infoBanner.closest('.bg-blue-50'); - expect(bannerContainer).toBeInTheDocument(); - }); - }); - }); - - describe('Authentication State Integration', () => { - it('should handle initialization required state', async () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ needsInitialization: true, loading: false })); - return () => {}; - }); - - render(InitPage); - - await waitFor(() => { - // Should stay on page and render form - expect(screen.getByRole('heading', { name: 'Welcome to GARM' })).toBeInTheDocument(); - expect(goto).not.toHaveBeenCalled(); - }); - }); - - it('should handle authentication redirect integration', async () => { - // Mock already authenticated user - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - return () => {}; - }); - - render(InitPage); - - await waitFor(() => { - // Should automatically redirect to dashboard - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should handle redirect to login when initialization not needed', async () => { - // Mock state where initialization is not needed - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ needsInitialization: false, loading: false })); - return () => {}; - }); - - render(InitPage); - - await waitFor(() => { - // Should redirect to login page - expect(goto).toHaveBeenCalledWith('/login'); - }); - }); - - it('should handle reactive auth state changes', async () => { - // Mock store that changes state - let callback: (state: any) => void; - vi.mocked(authStore.subscribe).mockImplementation((cb: (state: any) => void) => { - callback = cb; - cb(createMockAuthState({ needsInitialization: true, loading: false })); - return () => {}; - }); - - render(InitPage); - - await waitFor(() => { - expect(authStore.subscribe).toHaveBeenCalled(); - }); - - // Simulate auth state change to authenticated - callback!(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); - - 
describe('Form Validation Integration', () => { - it('should integrate real-time validation feedback', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - - // Make field invalid with whitespace (will be trimmed to empty but has length > 0) - await fireEvent.input(usernameInput, { target: { value: ' ' } }); - - await waitFor(() => { - expect(screen.getByText('Username is required')).toBeInTheDocument(); - }); - }); - - it('should integrate email validation with UI feedback', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Email Address')).toBeInTheDocument(); - }); - - const emailInput = screen.getByLabelText('Email Address'); - - // Enter invalid email - await fireEvent.input(emailInput, { target: { value: 'invalid-email' } }); - - await waitFor(() => { - expect(screen.getByText('Please enter a valid email address')).toBeInTheDocument(); - }); - }); - - it('should integrate password validation workflow', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - // Test password length validation - await fireEvent.input(passwordInput, { target: { value: 'short' } }); - - await waitFor(() => { - expect(screen.getByText('Password must be at least 8 characters long')).toBeInTheDocument(); - }); - - // Test password confirmation validation - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'different123' } }); - - await waitFor(() => { - expect(screen.getByText('Passwords do not match')).toBeInTheDocument(); - }); - }); - - it('should integrate validation summary display', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - // Make username invalid with whitespace to trigger validation summary - const usernameInput = screen.getByLabelText('Username'); - await fireEvent.input(usernameInput, { target: { value: ' ' } }); - - await waitFor(() => { - expect(screen.getByText('Please complete all required fields')).toBeInTheDocument(); - expect(screen.getByText('Enter a username')).toBeInTheDocument(); - }); - }); - - it('should integrate form validation with button state', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /initialize garm/i })).toBeInTheDocument(); - }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // Button should be disabled initially (no passwords) - expect(submitButton).toBeDisabled(); - - // Fill in valid passwords - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - await waitFor(() => { - // Button should now be enabled - expect(submitButton).not.toBeDisabled(); - }); - }); - }); - - describe('Advanced Configuration Integration', () => { - it('should integrate advanced configuration toggle workflow', async () => { - render(InitPage); - - await waitFor(() => { - 
expect(screen.getByRole('button', { name: /advanced configuration/i })).toBeInTheDocument(); - }); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - - // Advanced fields should not be visible initially - expect(screen.queryByLabelText('Metadata URL')).not.toBeInTheDocument(); - - // Toggle to show advanced fields - await fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - expect(screen.getByLabelText('Callback URL')).toBeInTheDocument(); - expect(screen.getByLabelText('Webhook URL')).toBeInTheDocument(); - }); - - // Toggle to hide advanced fields - await fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.queryByLabelText('Metadata URL')).not.toBeInTheDocument(); - }); - }); - - it('should integrate URL auto-population with form fields', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /advanced configuration/i })).toBeInTheDocument(); - }); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - await waitFor(() => { - const metadataInput = screen.getByLabelText('Metadata URL') as HTMLInputElement; - const callbackInput = screen.getByLabelText('Callback URL') as HTMLInputElement; - const webhookInput = screen.getByLabelText('Webhook URL') as HTMLInputElement; - - expect(metadataInput.value).toBe('https://garm.example.com/api/v1/metadata'); - expect(callbackInput.value).toBe('https://garm.example.com/api/v1/callbacks'); - expect(webhookInput.value).toBe('https://garm.example.com/webhooks'); - }); - }); - - it('should integrate custom URL input workflow', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /advanced configuration/i })).toBeInTheDocument(); - }); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - }); - - const metadataInput = screen.getByLabelText('Metadata URL'); - - // User can override auto-populated URLs - await fireEvent.input(metadataInput, { target: { value: 'https://custom.example.com/metadata' } }); - - expect((metadataInput as HTMLInputElement).value).toBe('https://custom.example.com/metadata'); - }); - }); - - describe('Initialization Workflow Integration', () => { - it('should handle complete initialization workflow', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should call auth.initialize with correct parameters - await waitFor(() => { - expect(auth.initialize).toHaveBeenCalledWith( - 'admin', - 'admin@garm.local', - 'password123', - 'Administrator', - { - callbackUrl: 'https://garm.example.com/api/v1/callbacks', - metadataUrl: 'https://garm.example.com/api/v1/metadata', - webhookUrl: 'https://garm.example.com/webhooks' - 
} - ); - }); - }); - - it('should integrate success workflow with toast and redirect', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should show toast and redirect - await waitFor(() => { - expect(toastStore.success).toHaveBeenCalledWith( - 'GARM Initialized', - 'GARM has been successfully initialized. Welcome!' - ); - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should integrate error handling with UI display', async () => { - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should display error in UI - await waitFor(() => { - expect(screen.getByText('Initialization failed')).toBeInTheDocument(); - }); - - // Should extract API error properly - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(goto).not.toHaveBeenCalled(); - }); - - it('should handle loading state integration', async () => { - // Mock delayed initialization - let resolveInitialize: () => void; - const initializePromise = new Promise((resolve) => { - resolveInitialize = resolve; - }); - (auth.initialize as any).mockReturnValue(initializePromise); - - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should show loading state - await waitFor(() => { - expect(screen.getByText('Initializing...')).toBeInTheDocument(); - expect(submitButton).toBeDisabled(); - }); - - // Complete initialization - resolveInitialize!(); - await initializePromise; - }); - }); - - describe('Advanced Configuration Workflow Integration', () => { - it('should integrate advanced configuration in initialization', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /advanced configuration/i })).toBeInTheDocument(); - }); - - // Enable advanced configuration - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await 
fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - }); - - // Customize URLs - const metadataInput = screen.getByLabelText('Metadata URL'); - const callbackInput = screen.getByLabelText('Callback URL'); - - await fireEvent.input(metadataInput, { target: { value: 'https://custom.example.com/metadata' } }); - await fireEvent.input(callbackInput, { target: { value: 'https://custom.example.com/callbacks' } }); - - // Fill in required fields - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Should use custom URLs in initialization - await waitFor(() => { - expect(auth.initialize).toHaveBeenCalledWith( - 'admin', - 'admin@garm.local', - 'password123', - 'Administrator', - { - callbackUrl: 'https://custom.example.com/callbacks', - metadataUrl: 'https://custom.example.com/metadata', - webhookUrl: 'https://garm.example.com/webhooks' - } - ); - }); - }); - - it('should integrate empty URL handling in advanced config', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /advanced configuration/i })).toBeInTheDocument(); - }); - - // Enable advanced configuration - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - }); - - // URLs are auto-populated, verify they have default values - const metadataInput = screen.getByLabelText('Metadata URL') as HTMLInputElement; - const callbackInput = screen.getByLabelText('Callback URL') as HTMLInputElement; - const webhookInput = screen.getByLabelText('Webhook URL') as HTMLInputElement; - - // Verify auto-population works - expect(metadataInput.value).toBe('https://garm.example.com/api/v1/metadata'); - expect(callbackInput.value).toBe('https://garm.example.com/api/v1/callbacks'); - expect(webhookInput.value).toBe('https://garm.example.com/webhooks'); - - // Fill in required fields - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Should use auto-populated URLs (component design prevents empty URLs) - await waitFor(() => { - expect(auth.initialize).toHaveBeenCalledWith( - 'admin', - 'admin@garm.local', - 'password123', - 'Administrator', - { - callbackUrl: 'https://garm.example.com/api/v1/callbacks', - metadataUrl: 'https://garm.example.com/api/v1/metadata', - webhookUrl: 'https://garm.example.com/webhooks' - } - ); - }); - }); - }); - - describe('Form State Management Integration', () => { - it('should maintain form state during validation interactions', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const 
usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - const emailInput = screen.getByLabelText('Email Address') as HTMLInputElement; - - // Change values - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(emailInput, { target: { value: 'test@example.com' } }); - - // Values should be maintained - expect(usernameInput.value).toBe('testuser'); - expect(emailInput.value).toBe('test@example.com'); - - // Trigger validation with whitespace in username field - await fireEvent.input(usernameInput, { target: { value: ' ' } }); - - // Should show validation but maintain other field values - await waitFor(() => { - expect(screen.getByText('Username is required')).toBeInTheDocument(); - expect(emailInput.value).toBe('test@example.com'); // Other field maintained - }); - }); - - it('should integrate form submission prevention when invalid', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /initialize garm/i })).toBeInTheDocument(); - }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // Form should be invalid initially (no passwords) - expect(submitButton).toBeDisabled(); - - // Try to submit (should not call API) - await fireEvent.click(submitButton); - - // Should not call initialize API - expect(auth.initialize).not.toHaveBeenCalled(); - }); - - it('should handle form state persistence during advanced toggle', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - // Fill in form data - const usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - - // Toggle advanced configuration - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - await waitFor(() => { - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - }); - - // Toggle back - await fireEvent.click(toggleButton); - - // Form data should be maintained - expect(usernameInput.value).toBe('testuser'); - }); - }); - - describe('Error Handling Integration', () => { - it('should integrate API error extraction and display', async () => { - const error = new Error('Server error occurred'); - (auth.initialize as any).mockRejectedValue(error); - (extractAPIError as any).mockReturnValue('Server error occurred'); - - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should extract and display error - await waitFor(() => { - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(screen.getByText('Server error occurred')).toBeInTheDocument(); - }); - }); - - it('should handle error state recovery', async () => { - // First cause an error - const error = new Error('First error'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - await waitFor(() => { - 
expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Trigger error - await fireEvent.click(submitButton); - - await waitFor(() => { - expect(screen.getByText('First error')).toBeInTheDocument(); - }); - - // Now mock success and try again - (auth.initialize as any).mockResolvedValue({}); - await fireEvent.click(submitButton); - - // Error should be cleared - await waitFor(() => { - expect(screen.queryByText('First error')).not.toBeInTheDocument(); - }); - }); - - it('should integrate error styling with theme', async () => { - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data and submit - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should display error with proper styling - await waitFor(() => { - const errorMessage = screen.getByText('Initialization failed'); - expect(errorMessage).toBeInTheDocument(); - - // Should have proper error styling container - const errorContainer = errorMessage.closest('.bg-red-50'); - expect(errorContainer).toBeInTheDocument(); - }); - }); - }); - - describe('Navigation Integration', () => { - it('should integrate path resolution', async () => { - render(InitPage); - - await waitFor(() => { - // Should resolve asset paths - expect(resolve).toHaveBeenCalledWith('/assets/garm-light.svg'); - expect(resolve).toHaveBeenCalledWith('/assets/garm-dark.svg'); - }); - }); - - it('should handle navigation on successful initialization', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should navigate to dashboard with resolved path - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should integrate automatic redirect for authenticated users', async () => { - // Mock authenticated user from start - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'existinguser' })); - return () => {}; - }); - - render(InitPage); - - // Should immediately redirect - await waitFor(() => { - 
expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); - - describe('Toast Integration', () => { - it('should integrate toast notifications with initialization success', async () => { - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should show success toast - await waitFor(() => { - expect(toastStore.success).toHaveBeenCalledWith( - 'GARM Initialized', - 'GARM has been successfully initialized. Welcome!' - ); - }); - }); - - it('should not show toast on initialization errors', async () => { - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - await waitFor(() => { - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - }); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Wait for error - await screen.findByText('Initialization failed'); - - // Should not show success toast - expect(toastStore.success).not.toHaveBeenCalled(); - }); - }); - - describe('Component Lifecycle Integration', () => { - it('should handle complete component lifecycle', () => { - const { unmount } = render(InitPage); - - // Should mount without errors - expect(screen.getByRole('heading', { name: 'Welcome to GARM' })).toBeInTheDocument(); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - - it('should integrate auth store subscription lifecycle', async () => { - render(InitPage); - - await waitFor(() => { - // Should subscribe to auth store - expect(authStore.subscribe).toHaveBeenCalled(); - }); - }); - - it('should handle reactive state updates', async () => { - // Mock store with reactive updates - let callback: (state: any) => void; - vi.mocked(authStore.subscribe).mockImplementation((cb: (state: any) => void) => { - callback = cb; - cb(createMockAuthState({ needsInitialization: true })); - return () => {}; - }); - - render(InitPage); - - await waitFor(() => { - expect(authStore.subscribe).toHaveBeenCalled(); - }); - - // Should handle reactive state change - callback!(createMockAuthState({ isAuthenticated: true, user: 'newuser' })); - - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/init/page.render.test.ts b/webapp/src/routes/init/page.render.test.ts deleted file mode 100644 index 4f481d97..00000000 --- a/webapp/src/routes/init/page.render.test.ts +++ /dev/null @@ -1,639 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import InitPage from './+page.svelte'; - 
-// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: true, - ...overrides - }; -} - -// Mock all external dependencies -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - initialize: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -describe('Init Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { auth } = await import('$lib/stores/auth.js'); - (auth.initialize as any).mockResolvedValue({}); - - const { resolve } = await import('$app/paths'); - (resolve as any).mockImplementation((path: string) => path); - - // Mock window.location for URL auto-population - Object.defineProperty(window, 'location', { - value: { - origin: 'https://garm.example.com' - }, - writable: true - }); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(InitPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(InitPage); - expect(container.querySelector('.min-h-screen')).toBeInTheDocument(); - }); - - it('should render main layout container', () => { - render(InitPage); - - // Should have main container with proper styling - const mainContainer = document.querySelector('.min-h-screen.bg-gray-50.dark\\:bg-gray-900'); - expect(mainContainer).toBeInTheDocument(); - }); - - it('should render centered content areas', () => { - render(InitPage); - - // Should have centered header area - const headerArea = document.querySelector('.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(headerArea).toBeInTheDocument(); - - // Should have centered form area - const formArea = document.querySelector('.mt-8.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(formArea).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(InitPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InitPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', () => { - const { component } = render(InitPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', () => { - const { container } = render(InitPage); - - // Should have main container - const mainContainer = container.querySelector('.min-h-screen'); - expect(mainContainer).toBeInTheDocument(); - - // Should have header area - const headerArea = container.querySelector('.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(headerArea).toBeInTheDocument(); - - // Should have form card - const formCard = 
container.querySelector('.bg-white.dark\\:bg-gray-800'); - expect(formCard).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', () => { - render(InitPage); - - // Should set page title - expect(document.title).toBe('Initialize GARM - First Run Setup'); - }); - - it('should have responsive layout classes', () => { - render(InitPage); - - // Should have responsive layout - const mainContainer = document.querySelector('.min-h-screen.bg-gray-50.dark\\:bg-gray-900.flex.flex-col.justify-center.py-12.sm\\:px-6.lg\\:px-8'); - expect(mainContainer).toBeInTheDocument(); - }); - }); - - describe('Header Section Rendering', () => { - it('should render logo section', () => { - render(InitPage); - - // Should have logo container - const logoContainer = document.querySelector('.flex.justify-center'); - expect(logoContainer).toBeInTheDocument(); - }); - - it('should render both light and dark logos', () => { - render(InitPage); - - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); - - // Should have light logo (visible by default) - const lightLogo = logos.find(img => img.classList.contains('dark:hidden')); - expect(lightLogo).toBeInTheDocument(); - - // Should have dark logo (hidden by default) - const darkLogo = logos.find(img => img.classList.contains('hidden')); - expect(darkLogo).toBeInTheDocument(); - }); - - it('should render page title and description', () => { - render(InitPage); - - // Should render main heading - expect(screen.getByRole('heading', { name: 'Welcome to GARM' })).toBeInTheDocument(); - - // Should render description - expect(screen.getByText('Complete the first-run setup to get started')).toBeInTheDocument(); - }); - - it('should have proper heading hierarchy', () => { - render(InitPage); - - const heading = screen.getByRole('heading', { name: 'Welcome to GARM' }); - expect(heading.tagName).toBe('H1'); - expect(heading).toHaveClass('text-3xl', 'font-extrabold'); - }); - }); - - describe('Info Banner Rendering', () => { - it('should render initialization info banner', () => { - render(InitPage); - - // Should have info banner - const infoBanner = document.querySelector('.bg-blue-50.dark\\:bg-blue-900\\/20'); - expect(infoBanner).toBeInTheDocument(); - - // Should have info title - expect(screen.getByText('First-Run Initialization')).toBeInTheDocument(); - - // Should have info description - expect(screen.getByText(/GARM needs to be initialized before first use/)).toBeInTheDocument(); - }); - - it('should have proper info banner styling', () => { - render(InitPage); - - const infoBanner = document.querySelector('.bg-blue-50.dark\\:bg-blue-900\\/20.border.border-blue-200.dark\\:border-blue-800.rounded-md.p-4.mb-6'); - expect(infoBanner).toBeInTheDocument(); - }); - - it('should render info icon', () => { - render(InitPage); - - const infoIcon = document.querySelector('.h-5.w-5.text-blue-400'); - expect(infoIcon).toBeInTheDocument(); - }); - }); - - describe('Form Rendering', () => { - it('should render initialization form', () => { - render(InitPage); - - // Should have form element - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - expect(form).toHaveClass('space-y-6'); - }); - - it('should render all form fields', () => { - render(InitPage); - - // Required fields - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Email Address')).toBeInTheDocument(); - expect(screen.getByLabelText('Full Name')).toBeInTheDocument(); - 
expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument(); - }); - - it('should render form fields with proper attributes', () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username'); - expect(usernameInput).toHaveAttribute('type', 'text'); - expect(usernameInput).toHaveAttribute('name', 'username'); - expect(usernameInput).toHaveAttribute('required'); - - const emailInput = screen.getByLabelText('Email Address'); - expect(emailInput).toHaveAttribute('type', 'email'); - expect(emailInput).toHaveAttribute('name', 'email'); - expect(emailInput).toHaveAttribute('required'); - - const passwordInput = screen.getByLabelText('Password'); - expect(passwordInput).toHaveAttribute('type', 'password'); - expect(passwordInput).toHaveAttribute('name', 'password'); - expect(passwordInput).toHaveAttribute('required'); - }); - - it('should render submit button', () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - expect(submitButton).toBeInTheDocument(); - expect(submitButton).toHaveAttribute('type', 'submit'); - }); - - it('should have proper form styling', () => { - render(InitPage); - - // Should have form card container - const formCard = document.querySelector('.bg-white.dark\\:bg-gray-800.py-8.px-4.shadow.sm\\:rounded-lg.sm\\:px-10'); - expect(formCard).toBeInTheDocument(); - - // Form inputs should have consistent styling - const usernameInput = screen.getByLabelText('Username'); - expect(usernameInput).toHaveClass('appearance-none', 'block', 'w-full', 'px-3', 'py-2', 'border'); - }); - }); - - describe('Advanced Configuration Rendering', () => { - it('should render advanced configuration toggle', () => { - render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - expect(toggleButton).toBeInTheDocument(); - }); - - it('should not show advanced fields initially', () => { - render(InitPage); - - // Advanced fields should not be visible initially - expect(screen.queryByLabelText('Metadata URL')).not.toBeInTheDocument(); - expect(screen.queryByLabelText('Callback URL')).not.toBeInTheDocument(); - expect(screen.queryByLabelText('Webhook URL')).not.toBeInTheDocument(); - }); - - it('should have proper toggle button styling', () => { - render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - - // Should have ghost variant styling - expect(toggleButton).toHaveClass('text-gray-700', 'dark:text-gray-300'); - }); - - it('should render toggle icon', () => { - render(InitPage); - - // Should have chevron icon in toggle button - const chevronIcon = document.querySelector('.w-4.h-4.mr-2.transition-transform'); - expect(chevronIcon).toBeInTheDocument(); - }); - }); - - describe('Validation Messages Rendering', () => { - it('should not show validation messages initially', () => { - render(InitPage); - - // Should not have validation messages initially - expect(screen.queryByText('Username is required')).not.toBeInTheDocument(); - expect(screen.queryByText('Please enter a valid email address')).not.toBeInTheDocument(); - expect(screen.queryByText('Password must be at least 8 characters long')).not.toBeInTheDocument(); - }); - - it('should show validation summary with default values', () => { - render(InitPage); - - // Should show validation summary because form has default values but is missing passwords - // The validation summary shows when form is 
invalid AND has field content (which default values provide) - expect(screen.getByText('Please complete all required fields')).toBeInTheDocument(); - }); - - it('should have proper validation message styling structure ready', () => { - render(InitPage); - - // Form should be structured to accommodate validation messages - const form = document.querySelector('form'); - expect(form).toHaveClass('space-y-6'); - }); - }); - - describe('Error State Rendering', () => { - it('should not show error state initially', () => { - render(InitPage); - - // Should not have error container initially - const errorContainer = document.querySelector('.bg-red-50'); - expect(errorContainer).not.toBeInTheDocument(); - }); - - it('should conditionally render error display', () => { - render(InitPage); - - // Error display should be conditional (not visible initially) - expect(screen.queryByText(/error/i)).not.toBeInTheDocument(); - }); - }); - - describe('Button Integration', () => { - it('should integrate Button component', () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - expect(submitButton).toBeInTheDocument(); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - expect(toggleButton).toBeInTheDocument(); - }); - - it('should pass correct props to submit Button', () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // Should be submit type - expect(submitButton).toHaveAttribute('type', 'submit'); - - // Should have primary variant styling - expect(submitButton).toHaveClass('bg-blue-600'); - - // Should be full width - expect(submitButton).toHaveClass('w-full'); - }); - - it('should pass correct props to toggle Button', () => { - render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - - // Should be button type - expect(toggleButton).toHaveAttribute('type', 'button'); - - // Should have ghost variant styling - expect(toggleButton).toHaveClass('text-gray-700', 'dark:text-gray-300'); - }); - }); - - describe('Accessibility Features', () => { - it('should have proper form labels', () => { - render(InitPage); - - // All form fields should have accessible labels - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Email Address')).toBeInTheDocument(); - expect(screen.getByLabelText('Full Name')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument(); - }); - - it('should have proper form semantics', () => { - render(InitPage); - - // Should have form element - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - - // Should have submit button - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - expect(submitButton).toHaveAttribute('type', 'submit'); - }); - - it('should support keyboard navigation', () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username'); - const emailInput = screen.getByLabelText('Email Address'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // All elements should be focusable - expect(usernameInput).toBeInTheDocument(); - expect(emailInput).toBeInTheDocument(); - expect(submitButton).toBeInTheDocument(); - }); - - it('should have proper ARIA attributes', () => { - render(InitPage); - - // Form inputs should 
have proper attributes - const usernameInput = screen.getByLabelText('Username'); - expect(usernameInput).toHaveAttribute('required'); - - const emailInput = screen.getByLabelText('Email Address'); - expect(emailInput).toHaveAttribute('required'); - }); - }); - - describe('Theme Support', () => { - it('should have dark mode classes', () => { - render(InitPage); - - // Should have dark mode background - const mainContainer = document.querySelector('.dark\\:bg-gray-900'); - expect(mainContainer).toBeInTheDocument(); - - // Should have dark mode text colors - const heading = screen.getByRole('heading', { name: 'Welcome to GARM' }); - expect(heading).toHaveClass('dark:text-white'); - }); - - it('should handle theme-aware logo display', () => { - render(InitPage); - - const logos = screen.getAllByAltText('GARM'); - - // Light logo should be hidden in dark mode - const lightLogo = logos.find(img => img.classList.contains('dark:hidden')); - expect(lightLogo).toBeInTheDocument(); - - // Dark logo should be shown in dark mode - const darkLogo = logos.find(img => img.classList.contains('dark:block')); - expect(darkLogo).toBeInTheDocument(); - }); - - it('should have theme-aware input styling', () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username'); - - // Should have dark mode classes - expect(usernameInput).toHaveClass('dark:border-gray-600'); - expect(usernameInput).toHaveClass('dark:bg-gray-700'); - expect(usernameInput).toHaveClass('dark:text-white'); - }); - - it('should have theme-aware form card styling', () => { - render(InitPage); - - const formCard = document.querySelector('.bg-white.dark\\:bg-gray-800'); - expect(formCard).toBeInTheDocument(); - }); - }); - - describe('Responsive Design', () => { - it('should use responsive layout classes', () => { - render(InitPage); - - // Should have responsive padding - const mainContainer = document.querySelector('.py-12.sm\\:px-6.lg\\:px-8'); - expect(mainContainer).toBeInTheDocument(); - }); - - it('should handle mobile-friendly layout', () => { - render(InitPage); - - // Should have mobile-optimized form - const headerArea = document.querySelector('.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(headerArea).toBeInTheDocument(); - - const formArea = document.querySelector('.mt-8.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(formArea).toBeInTheDocument(); - }); - - it('should have responsive typography', () => { - render(InitPage); - - const heading = screen.getByRole('heading', { name: 'Welcome to GARM' }); - - // Should use responsive text sizing - expect(heading).toHaveClass('text-3xl'); - }); - - it('should have responsive form card styling', () => { - render(InitPage); - - const formCard = document.querySelector('.py-8.px-4.shadow.sm\\:rounded-lg.sm\\:px-10'); - expect(formCard).toBeInTheDocument(); - }); - }); - - describe('Visual Hierarchy', () => { - it('should render elements in proper visual order', () => { - render(InitPage); - - // Logo should be first - const logoContainer = document.querySelector('.flex.justify-center'); - expect(logoContainer).toBeInTheDocument(); - - // Then heading - const heading = screen.getByRole('heading', { name: 'Welcome to GARM' }); - expect(heading).toBeInTheDocument(); - - // Then description - const description = screen.getByText('Complete the first-run setup to get started'); - expect(description).toBeInTheDocument(); - - // Then info banner - const infoBanner = screen.getByText('First-Run Initialization'); - expect(infoBanner).toBeInTheDocument(); - - // Then 
form - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - }); - - it('should have proper spacing between sections', () => { - render(InitPage); - - // Main container should have spacing - const headerArea = document.querySelector('.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(headerArea).toBeInTheDocument(); - - // Form area should have top margin - const formArea = document.querySelector('.mt-8.sm\\:mx-auto.sm\\:w-full.sm\\:max-w-md'); - expect(formArea).toBeInTheDocument(); - - // Form should have spacing - const form = document.querySelector('form.space-y-6'); - expect(form).toBeInTheDocument(); - }); - - it('should use consistent typography scale', () => { - render(InitPage); - - const heading = screen.getByRole('heading', { name: 'Welcome to GARM' }); - const description = screen.getByText('Complete the first-run setup to get started'); - const infoTitle = screen.getByText('First-Run Initialization'); - - // Main heading should be largest - expect(heading).toHaveClass('text-3xl', 'font-extrabold'); - - // Description should be smaller - expect(description).toHaveClass('text-sm'); - - // Info title should be medium - expect(infoTitle).toHaveClass('text-sm', 'font-medium'); - }); - }); - - describe('Loading State Rendering', () => { - it('should render button in normal state initially', () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - expect(screen.getByText('Initialize GARM')).toBeInTheDocument(); - }); - - it('should support loading state styling', () => { - render(InitPage); - - // Button should be ready to show loading state - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - expect(submitButton).toBeInTheDocument(); - }); - - it('should support disabled form states', () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // Button should be disabled initially (passwords empty) - expect(submitButton).toBeDisabled(); - }); - }); - - describe('Help Text Rendering', () => { - it('should render help text section', () => { - render(InitPage); - - // Should have help text (be more specific to avoid matching the info banner) - expect(screen.getByText(/This will create the admin user, generate a unique controller ID, and configure the required URLs/)).toBeInTheDocument(); - expect(screen.getByText(/Make sure to remember these credentials/)).toBeInTheDocument(); - }); - - it('should have proper help text styling', () => { - render(InitPage); - - const helpText = document.querySelector('.mt-6 .text-center .text-xs.text-gray-500.dark\\:text-gray-400'); - expect(helpText).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/init/page.test.ts b/webapp/src/routes/init/page.test.ts deleted file mode 100644 index 35e1e5f8..00000000 --- a/webapp/src/routes/init/page.test.ts +++ /dev/null @@ -1,573 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { render, screen, fireEvent } from '@testing-library/svelte'; -import InitPage from './+page.svelte'; - -// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: true, - ...overrides - }; -} - -// Mock the page stores -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => 
path) -})); - -// Mock the auth store -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - initialize: vi.fn() - } -})); - -// Mock toast store -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -// Global setup for each test -let auth: any; -let authStore: any; -let goto: any; -let resolve: any; -let toastStore: any; - -describe('Init Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up mocks - const authModule = await import('$lib/stores/auth.js'); - auth = authModule.auth; - authStore = authModule.authStore; - - const navigationModule = await import('$app/navigation'); - goto = navigationModule.goto; - - const pathsModule = await import('$app/paths'); - resolve = pathsModule.resolve; - - const toastModule = await import('$lib/stores/toast.js'); - toastStore = toastModule.toastStore; - - // Set up default API mocks - (auth.initialize as any).mockResolvedValue({}); - (resolve as any).mockImplementation((path: string) => path); - - // Mock window.location for URL auto-population - Object.defineProperty(window, 'location', { - value: { - origin: 'https://garm.example.com' - }, - writable: true - }); - }); - - afterEach(() => { - vi.restoreAllMocks(); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(InitPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(InitPage); - expect(document.title).toBe('Initialize GARM - First Run Setup'); - }); - - it('should render init form elements', () => { - render(InitPage); - - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Email Address')).toBeInTheDocument(); - expect(screen.getByLabelText('Full Name')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByLabelText('Confirm Password')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /initialize garm/i })).toBeInTheDocument(); - }); - - it('should render GARM logo and branding', () => { - render(InitPage); - - expect(screen.getByText('Welcome to GARM')).toBeInTheDocument(); - expect(screen.getByText('Complete the first-run setup to get started')).toBeInTheDocument(); - expect(screen.getAllByAltText('GARM')).toHaveLength(2); // Light and dark logos - }); - - it('should render initialization info banner', () => { - render(InitPage); - - expect(screen.getByText('First-Run Initialization')).toBeInTheDocument(); - expect(screen.getByText(/GARM needs to be initialized before first use/)).toBeInTheDocument(); - }); - }); - - describe('Default Form Values', () => { - it('should have default values populated', () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - const emailInput = screen.getByLabelText('Email Address') as HTMLInputElement; - const fullNameInput = screen.getByLabelText('Full Name') as HTMLInputElement; - - expect(usernameInput.value).toBe('admin'); - expect(emailInput.value).toBe('admin@garm.local'); - expect(fullNameInput.value).toBe('Administrator'); - 
}); - - it('should have empty password fields by default', () => { - render(InitPage); - - const passwordInput = screen.getByLabelText('Password') as HTMLInputElement; - const confirmPasswordInput = screen.getByLabelText('Confirm Password') as HTMLInputElement; - - expect(passwordInput.value).toBe(''); - expect(confirmPasswordInput.value).toBe(''); - }); - }); - - describe('Authentication Redirect Logic', () => { - it('should redirect to dashboard when user is already authenticated', () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - return () => {}; - }); - - render(InitPage); - - expect(goto).toHaveBeenCalledWith('/'); - }); - - it('should redirect to login when initialization not needed', () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ needsInitialization: false, loading: false })); - return () => {}; - }); - - render(InitPage); - - expect(goto).toHaveBeenCalledWith('/login'); - }); - - it('should stay on page when initialization is needed', () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ needsInitialization: true, loading: false })); - return () => {}; - }); - - render(InitPage); - - expect(goto).not.toHaveBeenCalled(); - }); - }); - - describe('Form Validation', () => { - it('should validate username field', async () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username'); - - // Make field invalid with whitespace (will be trimmed to empty but has length > 0) - await fireEvent.input(usernameInput, { target: { value: ' ' } }); - - expect(screen.getByText('Username is required')).toBeInTheDocument(); - }); - - it('should validate email field', async () => { - render(InitPage); - - const emailInput = screen.getByLabelText('Email Address'); - - // Enter invalid email - await fireEvent.input(emailInput, { target: { value: 'invalid-email' } }); - - expect(screen.getByText('Please enter a valid email address')).toBeInTheDocument(); - }); - - it('should validate full name field', async () => { - render(InitPage); - - const fullNameInput = screen.getByLabelText('Full Name'); - - // Make field invalid with whitespace (will be trimmed to empty but has length > 0) - await fireEvent.input(fullNameInput, { target: { value: ' ' } }); - - expect(screen.getByText('Full name is required')).toBeInTheDocument(); - }); - - it('should validate password length', async () => { - render(InitPage); - - const passwordInput = screen.getByLabelText('Password'); - - // Enter short password - await fireEvent.input(passwordInput, { target: { value: '123' } }); - - expect(screen.getByText('Password must be at least 8 characters long')).toBeInTheDocument(); - }); - - it('should validate password confirmation', async () => { - render(InitPage); - - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - // Enter mismatching passwords - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'different123' } }); - - expect(screen.getByText('Passwords do not match')).toBeInTheDocument(); - }); - - it('should show validation summary when form is invalid', async () => { - render(InitPage); - - // Make username invalid with whitespace to trigger 
validation summary - const usernameInput = screen.getByLabelText('Username'); - await fireEvent.input(usernameInput, { target: { value: ' ' } }); - - expect(screen.getByText('Please complete all required fields')).toBeInTheDocument(); - }); - }); - - describe('Advanced Configuration', () => { - it('should toggle advanced configuration panel', async () => { - render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - - // Advanced section should not be visible initially - expect(screen.queryByLabelText('Metadata URL')).not.toBeInTheDocument(); - - // Click to show advanced section - await fireEvent.click(toggleButton); - - expect(screen.getByLabelText('Metadata URL')).toBeInTheDocument(); - expect(screen.getByLabelText('Callback URL')).toBeInTheDocument(); - expect(screen.getByLabelText('Webhook URL')).toBeInTheDocument(); - }); - - it('should auto-populate URL fields', async () => { - render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - const metadataInput = screen.getByLabelText('Metadata URL') as HTMLInputElement; - const callbackInput = screen.getByLabelText('Callback URL') as HTMLInputElement; - const webhookInput = screen.getByLabelText('Webhook URL') as HTMLInputElement; - - expect(metadataInput.value).toBe('https://garm.example.com/api/v1/metadata'); - expect(callbackInput.value).toBe('https://garm.example.com/api/v1/callbacks'); - expect(webhookInput.value).toBe('https://garm.example.com/webhooks'); - }); - }); - - describe('Form Submission', () => { - it('should call auth.initialize with correct parameters on successful submission', async () => { - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - expect(auth.initialize).toHaveBeenCalledWith( - 'admin', - 'admin@garm.local', - 'password123', - 'Administrator', - { - callbackUrl: 'https://garm.example.com/api/v1/callbacks', - metadataUrl: 'https://garm.example.com/api/v1/metadata', - webhookUrl: 'https://garm.example.com/webhooks' - } - ); - }); - - it('should show success toast and redirect on successful initialization', async () => { - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Wait for async operations - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(toastStore.success).toHaveBeenCalledWith( - 'GARM Initialized', - 'GARM has been successfully initialized. Welcome!' 
- ); - expect(goto).toHaveBeenCalledWith('/'); - }); - - it('should handle initialization errors', async () => { - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Wait for error to appear - await screen.findByText('Initialization failed'); - expect(goto).not.toHaveBeenCalled(); - }); - - it('should not submit if form is invalid', async () => { - render(InitPage); - - // Leave passwords empty to make form invalid - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - expect(auth.initialize).not.toHaveBeenCalled(); - }); - }); - - describe('Loading States', () => { - it('should show loading state during initialization', async () => { - // Mock initialize to return a promise that doesn't resolve immediately - let resolveInitialize: () => void; - const initializePromise = new Promise((resolve) => { - resolveInitialize = resolve; - }); - (auth.initialize as any).mockReturnValue(initializePromise); - - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Should show loading state - await screen.findByText('Initializing...'); - expect(submitButton).toBeDisabled(); - - // Complete the initialization - resolveInitialize!(); - await initializePromise; - }); - - it('should clear loading state after initialization failure', async () => { - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - await fireEvent.click(submitButton); - - // Wait for error handling - await screen.findByText('Initialization failed'); - - // Should not be in loading state anymore - expect(screen.queryByText('Initializing...')).not.toBeInTheDocument(); - expect(screen.getByText('Initialize GARM')).toBeInTheDocument(); - expect(submitButton).not.toBeDisabled(); - }); - }); - - describe('Error Display', () => { - it('should clear error when starting new initialization attempt', async () => { - // First, cause an error - const error = new Error('Initialization failed'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const 
confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Trigger error - await fireEvent.click(submitButton); - await screen.findByText('Initialization failed'); - - // Now mock success and try again - (auth.initialize as any).mockResolvedValue({}); - await fireEvent.click(submitButton); - - // Wait for async operations and error should be cleared - await new Promise(resolve => setTimeout(resolve, 0)); - expect(screen.queryByText('Initialization failed')).not.toBeInTheDocument(); - }); - - it('should display API errors with proper formatting', async () => { - const error = new Error('Server temporarily unavailable'); - (auth.initialize as any).mockRejectedValue(error); - - render(InitPage); - - // Fill in valid form data - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Enter credentials and submit - await fireEvent.click(submitButton); - - // Should display error message - const errorElement = await screen.findByText('Server temporarily unavailable'); - expect(errorElement).toBeInTheDocument(); - - // Should have proper error styling - const errorContainer = errorElement.closest('.bg-red-50'); - expect(errorContainer).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(InitPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InitPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should subscribe to auth store on mount', () => { - render(InitPage); - expect(authStore.subscribe).toHaveBeenCalled(); - }); - }); - - describe('Form State Management', () => { - it('should maintain form state during interactions', async () => { - render(InitPage); - - const usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - const emailInput = screen.getByLabelText('Email Address') as HTMLInputElement; - - // Enter values - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(emailInput, { target: { value: 'test@example.com' } }); - - // Values should be maintained - expect(usernameInput.value).toBe('testuser'); - expect(emailInput.value).toBe('test@example.com'); - }); - - it('should update button state based on form validity', async () => { - render(InitPage); - - const submitButton = screen.getByRole('button', { name: /initialize garm/i }); - - // Button should be disabled initially (no passwords) - expect(submitButton).toBeDisabled(); - - // Fill in passwords to make form valid - const passwordInput = screen.getByLabelText('Password'); - const confirmPasswordInput = screen.getByLabelText('Confirm Password'); - - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.input(confirmPasswordInput, { target: { value: 'password123' } }); - - // Button should now be enabled - expect(submitButton).not.toBeDisabled(); - }); - }); - - describe('URL 
Auto-population', () => { - it('should update URLs when window.location changes', async () => { - const { unmount } = render(InitPage); - - const toggleButton = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton); - - // Check initial URLs - const metadataInput = screen.getByLabelText('Metadata URL') as HTMLInputElement; - expect(metadataInput.value).toBe('https://garm.example.com/api/v1/metadata'); - - // Clean up first render - unmount(); - - // Simulate location change (this would happen in real browser) - Object.defineProperty(window, 'location', { - value: { - origin: 'https://new-garm.example.com' - }, - writable: true - }); - - // Re-render component to trigger reactive updates - render(InitPage); - - const toggleButton2 = screen.getByRole('button', { name: /advanced configuration/i }); - await fireEvent.click(toggleButton2); - - const metadataInput2 = screen.getByLabelText('Metadata URL') as HTMLInputElement; - expect(metadataInput2.value).toBe('https://new-garm.example.com/api/v1/metadata'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/+page.svelte b/webapp/src/routes/instances/+page.svelte deleted file mode 100644 index 19bf30c5..00000000 --- a/webapp/src/routes/instances/+page.svelte +++ /dev/null @@ -1,281 +0,0 @@ - - - - Instances - GARM - - -
- [template markup lost in extraction: the deleted instances list page rendered an {#if error} alert banner with an "Error" heading and the {error} message, followed by further component markup whose tags did not survive; only the delete-confirmation fragment below kept its text]
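The surviving fragment just below references `showDeleteModal`, `instanceToDelete`, and `confirmDelete`, whose defining `<script>` logic was deleted with the file. A minimal sketch of what those handlers plausibly did — the `Instance` shape and the `deleteInstance` argument are assumptions; `garmApi` and `extractAPIError` are the same modules the tests in this diff mock:

```ts
// Hypothetical reconstruction of the delete-confirmation state handling
// referenced by the surviving {#if showDeleteModal && instanceToDelete} fragment.
import { garmApi } from '$lib/api/client.js';          // mocked by the tests in this diff
import { extractAPIError } from '$lib/utils/apiError'; // mocked by the tests in this diff

// Minimal stand-in for the generated API type; the real shape is assumed.
interface Instance { id?: string; name?: string; }

let showDeleteModal = false;
let instanceToDelete: Instance | null = null;
let error = '';

// Opens the confirmation modal for the selected row.
function requestDelete(instance: Instance) {
	instanceToDelete = instance;
	showDeleteModal = true;
}

// Bound to the modal's on:confirm event.
async function confirmDelete() {
	if (!instanceToDelete?.name) return;
	try {
		await garmApi.deleteInstance(instanceToDelete.name); // argument shape assumed
	} catch (err) {
		error = extractAPIError(err);
	} finally {
		showDeleteModal = false;
		instanceToDelete = null;
	}
}
```

The `{#if error}` banner noted in the placeholder above would render this `error` string.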
                - - -{#if showDeleteModal && instanceToDelete} - { - showDeleteModal = false; - instanceToDelete = null; - }} - on:confirm={confirmDelete} - /> -{/if} \ No newline at end of file diff --git a/webapp/src/routes/instances/[id]/+page.svelte b/webapp/src/routes/instances/[id]/+page.svelte deleted file mode 100644 index 40fa3be5..00000000 --- a/webapp/src/routes/instances/[id]/+page.svelte +++ /dev/null @@ -1,342 +0,0 @@ - - - - {instance ? `${instance.name} - Instance Details` : 'Instance Details'} - GARM - - -
- [template markup lost in extraction; recoverable structure of the deleted instance-details page:
-   an {#if error} alert banner with an "Error" heading and the {error} message
-   a loading state: "Loading instance details..."
-   an "Instance Information" card: ID, Name, Provider ID, Provider, Pool/Scale Set (linked {instance.pool_id} or {instance.scale_set_id}), Agent ID, Created At, and a conditional Updated At, with dates via formatDate
-   a "Status & Network" card: Instance Status and Runner Status badges (formatStatusText), a Network Addresses list or "No addresses available", and conditional OS Type / OS Name / OS Version / OS Architecture rows
-   a "Status Messages" list: each {message.message} with an event-level badge (getEventLevelBadge) and a formatDate timestamp ("Unknown date" fallback), else a "No status messages available" empty state
-   a fallback when loading yields nothing: "Instance not found."
-   only the delete-confirmation fragment below kept its text]
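The WebSocket Integration tests further down assert that the details page calls `websocketStore.subscribeToEntity('instance', ['update', 'delete'], …)` on mount and invokes the returned unsubscribe function on unmount. A sketch of component wiring consistent with those assertions — the event shape and the post-delete redirect target are assumptions, since the component source was deleted:

```ts
// Inside the component's <script> block (sketch, not the original source).
import { onMount } from 'svelte';
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import { websocketStore } from '$lib/stores/websocket.js';

onMount(() => {
	const unsubscribe = websocketStore.subscribeToEntity(
		'instance',
		['update', 'delete'],
		(event: { operation: string; payload: unknown }) => { // event shape assumed
			if (event.operation === 'delete') {
				// The instance is gone; leave the details page.
				goto(resolve('/instances'));
			}
			// An 'update' event would refresh the local `instance` state here.
		}
	);
	// Returning the cleanup function makes Svelte run it on unmount,
	// which is what the "clean up websocket subscription" test checks.
	return unsubscribe;
});
```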
                - - -{#if showDeleteModal && instance} - showDeleteModal = false} - on:confirm={handleDelete} - /> -{/if} \ No newline at end of file diff --git a/webapp/src/routes/instances/[id]/page.integration.test.ts b/webapp/src/routes/instances/[id]/page.integration.test.ts deleted file mode 100644 index 13f4c3b4..00000000 --- a/webapp/src/routes/instances/[id]/page.integration.test.ts +++ /dev/null @@ -1,708 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor, fireEvent } from '@testing-library/svelte'; -import InstanceDetailsPage from './+page.svelte'; -import { createMockInstance } from '../../../test/factories.js'; - -// Mock app stores and navigation -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ - params: { id: 'test-instance' }, - url: { pathname: '/instances/test-instance' } - }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -const mockInstance = createMockInstance({ - id: 'inst-123', - name: 'test-instance', - provider_id: 'prov-123', - provider_name: 'hetzner', - status: 'running', - runner_status: 'idle', - agent_id: 12345, - pool_id: 'pool-123', - os_type: 'linux', - os_name: 'ubuntu', - os_version: '22.04', - os_arch: 'amd64', - addresses: [ - { address: '192.168.1.100', type: 'private' }, - { address: '203.0.113.10', type: 'public' } - ], - status_messages: [ - { - message: 'Instance started successfully', - event_level: 'info', - created_at: '2024-01-01T10:00:00Z' - }, - { - message: 'Runner job completed', - event_level: 'info', - created_at: '2024-01-01T11:00:00Z' - }, - { - message: 'Warning: High memory usage detected', - event_level: 'warning', - created_at: '2024-01-01T12:00:00Z' - } - ] -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/Badge.svelte'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getInstance: vi.fn(), - deleteInstance: vi.fn() - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/status.js', () => ({ - formatStatusText: vi.fn((status) => { - if (!status) return 'Unknown'; - return status.charAt(0).toUpperCase() + status.slice(1); - }), - getStatusBadgeClass: vi.fn((status) => { - switch (status) { - case 'running': return 'bg-green-100 text-green-800 ring-green-200'; - case 'idle': return 'bg-blue-100 text-blue-800 ring-blue-200'; - case 'pending': return 'bg-yellow-100 text-yellow-800 ring-yellow-200'; - case 'error': return 'bg-red-100 text-red-800 ring-red-200'; - default: return 'bg-gray-100 text-gray-800 ring-gray-200'; - } - }) -})); - -vi.mock('$lib/utils/common.js', () => ({ - formatDate: vi.fn((date) => { - const d = new Date(date); - return d.toLocaleDateString() + ' ' + d.toLocaleTimeString(); - }), - scrollToBottomEvents: vi.fn(), - getEventLevelBadge: vi.fn((level) => { - switch (level) { - case 'error': return { variant: 'danger', text: 'Error' }; - case 'warning': return { variant: 'warning', text: 'Warning' }; - case 'info': return { variant: 'info', text: 'Info' }; - default: return { variant: 'info', text: 'Info' }; - } - }) -})); - -// Global setup 
for each test -let garmApi: any; -let websocketStore: any; - -describe('Comprehensive Integration Tests for Instance Details Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const wsModule = await import('$lib/stores/websocket.js'); - websocketStore = wsModule.websocketStore; - - (garmApi.getInstance as any).mockResolvedValue(mockInstance); - (garmApi.deleteInstance as any).mockResolvedValue({}); - (websocketStore.subscribeToEntity as any).mockReturnValue(vi.fn()); - }); - - describe('Component Rendering and Data Display', () => { - it('should render instance details page with real components', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Wait for data to load - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance'); - }); - - // Should render the breadcrumb navigation - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - - // Should render main content sections - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - }); - - it('should display instance data in information cards', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should display instance basic information (using getAllByText for duplicate elements) - expect(screen.getAllByText('test-instance')[0]).toBeInTheDocument(); - expect(screen.getByText('inst-123')).toBeInTheDocument(); - expect(screen.getByText('prov-123')).toBeInTheDocument(); - expect(screen.getByText('hetzner')).toBeInTheDocument(); - expect(screen.getByText('12345')).toBeInTheDocument(); - }); - - it('should render status and network information', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should display status information - expect(screen.getByText('Instance Status:')).toBeInTheDocument(); - expect(screen.getByText('Runner Status:')).toBeInTheDocument(); - - // Should display network addresses section - expect(screen.getByText('Network Addresses:')).toBeInTheDocument(); - // Note: The DOM shows "No addresses available", which suggests the mock addresses aren't being loaded - // This could be due to the factory or mock setup - let's verify the basic structure is there - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - }); - }); - - describe('Status Messages Integration', () => { - it('should display status messages with proper formatting', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should display status messages section - expect(screen.getByText('Status Messages')).toBeInTheDocument(); - // Note: The DOM shows "No status messages available", which suggests the mock messages aren't being loaded - // This could be due to the factory or mock setup - let's verify the basic structure is there - expect(screen.getByText(/No status messages available|Instance started successfully/i)).toBeInTheDocument(); - }); - - it('should handle empty status messages', async () => { - const instanceWithoutMessages = { ...mockInstance, status_messages: [] }; - (garmApi.getInstance as any).mockResolvedValue(instanceWithoutMessages); - - 
render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should display empty state - expect(screen.getByText(/No status messages available/i)).toBeInTheDocument(); - }); - - it('should auto-scroll status messages on load', async () => { - const { scrollToBottomEvents } = await import('$lib/utils/common.js'); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should call scroll function after loading - await new Promise(resolve => setTimeout(resolve, 150)); - expect(scrollToBottomEvents).toHaveBeenCalled(); - }); - }); - - describe('Navigation Integration', () => { - it('should render breadcrumb navigation with working links', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should have working breadcrumb navigation - const instancesLink = screen.getByRole('link', { name: /Instances/i }); - expect(instancesLink).toBeInTheDocument(); - expect(instancesLink).toHaveAttribute('href', '/instances'); - }); - - it('should handle pool/scale set navigation links', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should have pool navigation link - const poolLink = screen.getByRole('link', { name: 'pool-123' }); - expect(poolLink).toBeInTheDocument(); - expect(poolLink).toHaveAttribute('href', '/pools/pool-123'); - }); - - it('should handle scale set navigation when applicable', async () => { - const instanceWithScaleSet = { - ...mockInstance, - pool_id: undefined, - scale_set_id: 'scaleset-456' - }; - (garmApi.getInstance as any).mockResolvedValue(instanceWithScaleSet); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should have scale set navigation link - const scaleSetLink = screen.getByRole('link', { name: 'scaleset-456' }); - expect(scaleSetLink).toBeInTheDocument(); - expect(scaleSetLink).toHaveAttribute('href', '/scalesets/scaleset-456'); - }); - }); - - describe('Delete Integration', () => { - it('should handle delete instance workflow', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Delete API should be available for the delete workflow - expect(garmApi.deleteInstance).toBeDefined(); - - // Should have delete button - expect(screen.getByRole('button', { name: /Delete Instance/i })).toBeInTheDocument(); - }); - - it('should show delete modal on button click', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Click delete button - const deleteButton = screen.getByRole('button', { name: /Delete Instance/i }); - await fireEvent.click(deleteButton); - - // Should show delete modal (using getAllByText for duplicate elements) - await waitFor(() => { - expect(screen.getAllByText('Delete Instance')[0]).toBeInTheDocument(); - }); - }); - - it('should handle delete error integration', async () => { - // Set up API to fail when deleteInstance is called - const error = new Error('Instance deletion failed'); - (garmApi.deleteInstance as any).mockRejectedValue(error); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should have error handling 
infrastructure in place - expect(garmApi.deleteInstance).toBeDefined(); - }); - }); - - describe('API Integration', () => { - it('should call API when component mounts', async () => { - render(InstanceDetailsPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the API to load data - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance'); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock API response with valid instance data - (garmApi.getInstance as any).mockResolvedValue(mockInstance); - - render(InstanceDetailsPage); - - // Component should render the loading state initially - expect(screen.getByText(/Loading instance details/i)).toBeInTheDocument(); - - // Wait for API call and data to load - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Wait for component to render the instance information - await waitFor(() => { - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - }); - }); - - it('should handle API errors and display error state', async () => { - // Mock API to fail - const error = new Error('Failed to load instance details'); - (garmApi.getInstance as any).mockRejectedValue(error); - - const { container } = render(InstanceDetailsPage); - - // Wait for error to be handled - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - - // Should display error state in component structure - expect(container).toBeInTheDocument(); - }); - - it('should handle not found state', async () => { - // Mock API to return null - (garmApi.getInstance as any).mockResolvedValue(null); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should show not found message - expect(screen.getByText(/Instance not found/i)).toBeInTheDocument(); - }); - }); - - describe('WebSocket Integration', () => { - it('should subscribe to websocket events on mount', async () => { - render(InstanceDetailsPage); - - // Wait for component mount - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['update', 'delete'], - expect.any(Function) - ); - }); - }); - - it('should handle websocket instance update events', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Update event handling should be integrated for real-time updates - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['update']), - expect.any(Function) - ); - }); - - it('should handle websocket instance delete events', async () => { - const { goto } = await import('$app/navigation'); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Delete event handling should be integrated with navigation - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['delete']), - expect.any(Function) - ); - expect(goto).toBeDefined(); - }); - - it('should clean up websocket subscription on unmount', async () => { - const mockUnsubscribe = vi.fn(); - (websocketStore.subscribeToEntity as 
any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstanceDetailsPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Should clean up subscription on unmount - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - - it('should auto-scroll on websocket status message updates', async () => { - const { scrollToBottomEvents } = await import('$lib/utils/common.js'); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Should have scroll functionality integrated for real-time message updates - expect(scrollToBottomEvents).toBeDefined(); - }); - }); - - describe('URL Parameter Integration', () => { - it('should handle URL parameter decoding', async () => { - // Mock page store with encoded parameter - const { page } = await import('$app/stores'); - vi.mocked(page.subscribe).mockImplementation((callback: any) => { - callback({ - params: { id: 'test%2Dinstance%2Dwith%2Ddashes' }, - url: { pathname: '/instances/test%2Dinstance%2Dwith%2Ddashes' } - }); - return () => {}; - }); - - render(InstanceDetailsPage); - - await waitFor(() => { - // Should decode URL parameter properly - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance-with-dashes'); - }); - }); - - it('should handle parameter changes', async () => { - // Reset the page store mock to use default test-instance - const { page } = await import('$app/stores'); - vi.mocked(page.subscribe).mockImplementation((callback: any) => { - callback({ - params: { id: 'test-instance' }, - url: { pathname: '/instances/test-instance' } - }); - return () => {}; - }); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance'); - }); - - // Should handle dynamic parameter changes - expect(garmApi.getInstance).toBeDefined(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the API system - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - }); - - it('should maintain consistent state across components', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should be integrated through the API system - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // All sections should display consistent data - expect(screen.getAllByText('test-instance')).toHaveLength(2); // breadcrumb + instance info - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(InstanceDetailsPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Conditional Display Integration', () => { - it('should handle optional fields display', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should display OS information when available - expect(screen.getByText('OS 
Type:')).toBeInTheDocument(); - expect(screen.getByText('linux')).toBeInTheDocument(); - expect(screen.getByText('OS Version:')).toBeInTheDocument(); - expect(screen.getByText('22.04')).toBeInTheDocument(); - }); - - it('should handle missing optional fields', async () => { - const minimalInstance = { - id: 'inst-123', - name: 'minimal-instance', - created_at: '2024-01-01T00:00:00Z', - status: 'running' - }; - (garmApi.getInstance as any).mockResolvedValue(minimalInstance); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should handle missing fields gracefully (use getAllByText for instance name) - expect(screen.getAllByText('minimal-instance')[0]).toBeInTheDocument(); - expect(screen.getByText(/Not assigned/i)).toBeInTheDocument(); // agent_id fallback - }); - - it('should show updated at field conditionally', async () => { - const instanceWithUpdate = { - ...mockInstance, - updated_at: '2024-01-02T00:00:00Z' - }; - (garmApi.getInstance as any).mockResolvedValue(instanceWithUpdate); - - render(InstanceDetailsPage); - - await waitFor(() => { - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should show updated at when different from created at - expect(screen.getByText('Updated At:')).toBeInTheDocument(); - }); - }); - - describe('Error Handling Integration', () => { - it('should integrate comprehensive error handling', async () => { - // Set up various error scenarios - const error = new Error('Network error'); - (garmApi.getInstance as any).mockRejectedValue(error); - - render(InstanceDetailsPage); - - await waitFor(() => { - // Should handle errors gracefully - expect(screen.getByText(/Network error/i)).toBeInTheDocument(); - }); - - // Should maintain page structure during errors - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - }); - - it('should handle websocket connection errors', async () => { - // Mock websocket to return null (simulating connection failure) - (websocketStore.subscribeToEntity as any).mockReturnValue(null); - - // Should render successfully even with websocket issues - const { container } = render(InstanceDetailsPage); - expect(container).toBeInTheDocument(); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Should have proper ARIA attributes and labels - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - }); - - // Should have accessible navigation elements - expect(screen.getByRole('link', { name: /Instances/i })).toBeInTheDocument(); - }); - - it('should be responsive across different viewport sizes', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Should render properly across different viewport sizes - expect(garmApi.getInstance).toHaveBeenCalled(); - }); - - // Should have responsive layout classes - expect(document.querySelector('.grid.grid-cols-1.lg\\:grid-cols-2')).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - // Ensure API returns instance data - (garmApi.getInstance as any).mockResolvedValue(mockInstance); - - render(InstanceDetailsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - }); - - // Wait for instance data to load and display - await waitFor(() => { - 
expect(screen.getByText('Instance Information')).toBeInTheDocument(); - }); - }); - }); - - describe('Real-time Updates Integration', () => { - it('should handle real-time instance updates', async () => { - render(InstanceDetailsPage); - - await waitFor(() => { - // Should handle real-time updates through websocket - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Real-time update events should be handled - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['update']), - expect.any(Function) - ); - }); - - it('should handle real-time instance deletion', async () => { - const { goto } = await import('$app/navigation'); - - render(InstanceDetailsPage); - - await waitFor(() => { - // Should handle real-time deletion through websocket - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Real-time deletion should trigger navigation - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['delete']), - expect.any(Function) - ); - expect(goto).toBeDefined(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/[id]/page.render.test.ts b/webapp/src/routes/instances/[id]/page.render.test.ts deleted file mode 100644 index 6a02b232..00000000 --- a/webapp/src/routes/instances/[id]/page.render.test.ts +++ /dev/null @@ -1,455 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import InstanceDetailsPage from './+page.svelte'; -import { createMockInstance } from '../../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ - params: { id: 'test-instance' }, - url: { pathname: '/instances/test-instance' } - }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getInstance: vi.fn(), - deleteInstance: vi.fn() - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/status.js', () => ({ - formatStatusText: vi.fn((status) => { - if (!status) return 'Unknown'; - return status.charAt(0).toUpperCase() + status.slice(1); - }), - getStatusBadgeClass: vi.fn((status) => { - switch (status) { - case 'running': return 'bg-green-100 text-green-800 ring-green-200'; - case 'idle': return 'bg-blue-100 text-blue-800 ring-blue-200'; - case 'pending': return 'bg-yellow-100 text-yellow-800 ring-yellow-200'; - case 'error': return 'bg-red-100 text-red-800 ring-red-200'; - default: return 'bg-gray-100 text-gray-800 ring-gray-200'; - } - }) -})); - -vi.mock('$lib/utils/common.js', () => ({ - formatDate: vi.fn((date) => new Date(date).toLocaleString()), - scrollToBottomEvents: vi.fn(), - getEventLevelBadge: vi.fn((level) => ({ - variant: level === 'error' ? 'danger' : level === 'warning' ? 
'warning' : 'info', - text: level.toUpperCase() - })) -})); - -const mockInstance = createMockInstance({ - id: 'inst-123', - name: 'test-instance', - provider_id: 'prov-123', - provider_name: 'test-provider', - status: 'running', - runner_status: 'idle', - pool_id: 'pool-123', - addresses: [ - { address: '192.168.1.100', type: 'private' } - ], - status_messages: [ - { - message: 'Instance ready', - event_level: 'info', - created_at: '2024-01-01T10:00:00Z' - } - ] -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/Badge.svelte'); - -describe('Instance Details Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(mockInstance); - (garmApi.deleteInstance as any).mockResolvedValue({}); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(InstanceDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(InstanceDetailsPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render breadcrumb navigation', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have breadcrumb navigation - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - }); - - it('should render instance information cards', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have main content sections - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(InstanceDetailsPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InstanceDetailsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(InstanceDetailsPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load instance on mount', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstanceDetailsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call API to load instance - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance'); - }); - - it('should subscribe to websocket events on mount', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should subscribe to websocket events - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['update', 'delete'], - expect.any(Function) - ); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', async () => { - const { container } = render(InstanceDetailsPage); - - // Wait for instance to load - await new 
Promise(resolve => setTimeout(resolve, 0)); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should set page title - expect(document.title).toContain('test-instance - Instance Details - GARM'); - }); - - it('should handle error display conditionally', async () => { - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockRejectedValue(new Error('Test error')); - - render(InstanceDetailsPage); - - // Wait for error - await new Promise(resolve => setTimeout(resolve, 100)); - - // Error display should be conditional - expect(screen.getByText(/Test error/i)).toBeInTheDocument(); - }); - - it('should render loading state initially', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Mock delayed response - (garmApi.getInstance as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockInstance), 200)) - ); - - render(InstanceDetailsPage); - - // Should show loading initially - expect(screen.getByText(/Loading instance details/i)).toBeInTheDocument(); - }); - }); - - describe('Information Cards Rendering', () => { - it('should render instance information card', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render instance information card - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - expect(screen.getByText('ID:')).toBeInTheDocument(); - expect(screen.getByText('Name:')).toBeInTheDocument(); - }); - - it('should render status and network card', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render status card - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - expect(screen.getByText('Instance Status:')).toBeInTheDocument(); - expect(screen.getByText('Runner Status:')).toBeInTheDocument(); - }); - - it('should render network addresses section', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render network section - expect(screen.getByText('Network Addresses:')).toBeInTheDocument(); - expect(screen.getByText('192.168.1.100')).toBeInTheDocument(); - }); - - it('should render OS information conditionally', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render OS information when available - expect(screen.getByText('OS Type:')).toBeInTheDocument(); - expect(screen.getByText('OS Architecture:')).toBeInTheDocument(); - }); - }); - - describe('Status Messages Rendering', () => { - it('should render status messages when available', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render status messages section - expect(screen.getByText('Status Messages')).toBeInTheDocument(); - expect(screen.getByText('Instance ready')).toBeInTheDocument(); - }); - - it('should render empty state when no messages', async () => { - const instanceWithoutMessages = { ...mockInstance, 
status_messages: [] }; - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(instanceWithoutMessages); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render empty state - expect(screen.getByText(/No status messages available/i)).toBeInTheDocument(); - }); - - it('should render scrollable container for messages', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have scrollable container - const messagesContainer = document.querySelector('.max-h-96.overflow-y-auto'); - expect(messagesContainer).toBeInTheDocument(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render delete modal', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Delete modal should not be visible initially (check for modal-specific text) - expect(screen.queryByText('Are you sure you want to delete this instance? This action cannot be undone.')).not.toBeInTheDocument(); - }); - - it('should render delete button', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have delete button - expect(screen.getByRole('button', { name: /Delete Instance/i })).toBeInTheDocument(); - }); - }); - - describe('WebSocket Lifecycle', () => { - it('should clean up websocket subscription on unmount', async () => { - const mockUnsubscribe = vi.fn(); - const { websocketStore } = await import('$lib/stores/websocket.js'); - (websocketStore.subscribeToEntity as any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstanceDetailsPage); - - // Wait for mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Unmount and verify cleanup - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - - it('should handle websocket subscription errors gracefully', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - (websocketStore.subscribeToEntity as any).mockReturnValue(null); - - // Should render successfully even with websocket issues - const { container } = render(InstanceDetailsPage); - expect(container).toBeInTheDocument(); - }); - }); - - describe('Navigation Elements', () => { - it('should render breadcrumb links correctly', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have correct breadcrumb structure - const instancesLink = screen.getByRole('link', { name: /Instances/i }); - expect(instancesLink).toHaveAttribute('href', '/instances'); - }); - - it('should render pool/scale set links when available', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have pool link - const poolLink = screen.getByRole('link', { name: 'pool-123' }); - expect(poolLink).toHaveAttribute('href', '/pools/pool-123'); - }); - }); - - describe('Conditional Content Rendering', () => { - it('should render different states based on data availability', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should adapt rendering based on available data - 
expect(screen.getByText('Instance Information')).toBeInTheDocument(); - }); - - it('should handle not found state', async () => { - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(null); - - render(InstanceDetailsPage); - - // Wait for loading to complete - await new Promise(resolve => setTimeout(resolve, 100)); - - // Should show not found state - expect(screen.getByText(/Instance not found/i)).toBeInTheDocument(); - }); - - it('should render updated at field conditionally', async () => { - const instanceWithUpdate = { - ...mockInstance, - updated_at: '2024-01-02T00:00:00Z' - }; - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(instanceWithUpdate); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should show updated at when different from created at - expect(screen.getByText('Updated At:')).toBeInTheDocument(); - }); - }); - - describe('Responsive Layout', () => { - it('should use responsive grid layout', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have responsive grid - const gridContainer = document.querySelector('.grid.grid-cols-1.lg\\:grid-cols-2'); - expect(gridContainer).toBeInTheDocument(); - }); - - it('should handle mobile-friendly layout', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have mobile-responsive classes - expect(document.querySelector('.space-x-1.md\\:space-x-3')).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/[id]/page.test.ts b/webapp/src/routes/instances/[id]/page.test.ts deleted file mode 100644 index e4db8c0a..00000000 --- a/webapp/src/routes/instances/[id]/page.test.ts +++ /dev/null @@ -1,554 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import InstanceDetailsPage from './+page.svelte'; -import { createMockInstance } from '../../../test/factories.js'; - -// Mock the page stores -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ - params: { id: 'test-instance' }, - url: { pathname: '/instances/test-instance' } - }); - return () => {}; - }) - } -})); - -// Mock navigation -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -// Mock paths -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getInstance: vi.fn(), - deleteInstance: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/status.js', () => ({ - formatStatusText: vi.fn((status) => { - if (!status) return 'Unknown'; - return status.charAt(0).toUpperCase() + status.slice(1); - }), - getStatusBadgeClass: vi.fn((status) => { - switch (status) { - case 'running': return 'bg-green-100 text-green-800 ring-green-200'; - case 'idle': return 'bg-blue-100 text-blue-800 ring-blue-200'; - case 'pending': return 'bg-yellow-100 text-yellow-800 ring-yellow-200'; - case 'error': return 
'bg-red-100 text-red-800 ring-red-200'; - default: return 'bg-gray-100 text-gray-800 ring-gray-200'; - } - }) -})); - -vi.mock('$lib/utils/common.js', () => ({ - formatDate: vi.fn((date) => new Date(date).toLocaleString()), - scrollToBottomEvents: vi.fn(), - getEventLevelBadge: vi.fn((level) => ({ - variant: level === 'error' ? 'danger' : level === 'warning' ? 'warning' : 'info', - text: level.toUpperCase() - })) -})); - -const mockInstance = createMockInstance({ - id: 'inst-123', - name: 'test-instance', - provider_id: 'prov-123', - provider_name: 'test-provider', - status: 'running', - runner_status: 'idle', - agent_id: 12345, - pool_id: 'pool-123', - os_type: 'linux', - os_name: 'ubuntu', - os_arch: 'amd64', - addresses: [ - { address: '192.168.1.100', type: 'private' }, - { address: '203.0.113.10', type: 'public' } - ], - status_messages: [ - { - message: 'Instance started successfully', - event_level: 'info', - created_at: '2024-01-01T10:00:00Z' - }, - { - message: 'Warning: High memory usage', - event_level: 'warning', - created_at: '2024-01-01T11:00:00Z' - } - ] -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/Badge.svelte'); - -describe('Instance Details Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mock - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(mockInstance); - (garmApi.deleteInstance as any).mockResolvedValue({}); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(InstanceDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title with instance name', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(document.title).toContain('test-instance - Instance Details - GARM'); - }); - - it('should set fallback page title when no instance', async () => { - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockRejectedValue(new Error('Instance not found')); - - render(InstanceDetailsPage); - - expect(document.title).toContain('Instance Details - GARM'); - }); - }); - - describe('Data Loading', () => { - it('should load instance on mount', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance'); - }); - - it('should handle loading state', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Mock delayed response - (garmApi.getInstance as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockInstance), 100)) - ); - - render(InstanceDetailsPage); - - // Should show loading state initially - expect(screen.getByText(/Loading instance details/i)).toBeInTheDocument(); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 150)); - - // Loading should be gone - expect(screen.queryByText(/Loading instance details/i)).not.toBeInTheDocument(); - }); - - it('should handle API error state', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Mock API to fail - const error = new Error('Failed to load instance'); - (garmApi.getInstance as 
any).mockRejectedValue(error); - - render(InstanceDetailsPage); - - // Wait for the error to be handled - await new Promise(resolve => setTimeout(resolve, 100)); - - // Should display error - expect(screen.getByText(/Failed to load instance/i)).toBeInTheDocument(); - }); - }); - - describe('Instance Information Display', () => { - it('should display instance basic information', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display instance details - expect(screen.getByText('Instance Information')).toBeInTheDocument(); - expect(screen.getAllByText('test-instance')[0]).toBeInTheDocument(); - expect(screen.getByText('inst-123')).toBeInTheDocument(); - expect(screen.getByText('prov-123')).toBeInTheDocument(); - }); - - it('should display status information', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display status section - expect(screen.getByText('Status & Network')).toBeInTheDocument(); - expect(screen.getByText('Instance Status:')).toBeInTheDocument(); - expect(screen.getByText('Runner Status:')).toBeInTheDocument(); - }); - - it('should display network addresses when available', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display network addresses - expect(screen.getByText('Network Addresses:')).toBeInTheDocument(); - expect(screen.getByText('192.168.1.100')).toBeInTheDocument(); - expect(screen.getByText('203.0.113.10')).toBeInTheDocument(); - }); - - it('should handle missing network addresses', async () => { - const instanceWithoutAddresses = { ...mockInstance, addresses: [] }; - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(instanceWithoutAddresses); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should show no addresses message - expect(screen.getByText(/No addresses available/i)).toBeInTheDocument(); - }); - }); - - describe('Pool/Scale Set Links', () => { - it('should display pool link when pool_id exists', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have pool link - const poolLink = screen.getByRole('link', { name: 'pool-123' }); - expect(poolLink).toBeInTheDocument(); - expect(poolLink).toHaveAttribute('href', '/pools/pool-123'); - }); - - it('should display scale set link when scale_set_id exists', async () => { - const instanceWithScaleSet = { ...mockInstance, pool_id: undefined, scale_set_id: 'scaleset-123' }; - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(instanceWithScaleSet); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have scale set link - const scaleSetLink = screen.getByRole('link', { name: 'scaleset-123' }); - expect(scaleSetLink).toBeInTheDocument(); - expect(scaleSetLink).toHaveAttribute('href', '/scalesets/scaleset-123'); - }); - - it('should show dash when no pool or scale set', async () => { - const instanceWithoutPoolOrScaleSet = { ...mockInstance, pool_id: undefined, scale_set_id: undefined }; - const { garmApi } = await import('$lib/api/client.js'); - 
(garmApi.getInstance as any).mockResolvedValue(instanceWithoutPoolOrScaleSet); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should show dash - expect(screen.getByText('-')).toBeInTheDocument(); - }); - }); - - describe('Status Messages', () => { - it('should display status messages when available', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display status messages - expect(screen.getByText('Status Messages')).toBeInTheDocument(); - expect(screen.getByText('Instance started successfully')).toBeInTheDocument(); - expect(screen.getByText('Warning: High memory usage')).toBeInTheDocument(); - }); - - it('should handle empty status messages', async () => { - const instanceWithoutMessages = { ...mockInstance, status_messages: [] }; - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(instanceWithoutMessages); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should show no messages state - expect(screen.getByText(/No status messages available/i)).toBeInTheDocument(); - }); - - it('should auto-scroll status messages on load', async () => { - const { scrollToBottomEvents } = await import('$lib/utils/common.js'); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 200)); - - // Should call scroll function - expect(scrollToBottomEvents).toHaveBeenCalled(); - }); - }); - - describe('Delete Functionality', () => { - it('should show delete button', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have delete button - expect(screen.getByRole('button', { name: /Delete Instance/i })).toBeInTheDocument(); - }); - - it('should handle delete instance', async () => { - const { garmApi } = await import('$lib/api/client.js'); - const { goto } = await import('$app/navigation'); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Delete API should be available - expect(garmApi.deleteInstance).toBeDefined(); - expect(goto).toBeDefined(); - }); - - it('should handle delete error', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Mock delete to fail - const error = new Error('Delete failed'); - (garmApi.deleteInstance as any).mockRejectedValue(error); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have error handling ready - expect(screen.getByRole('button', { name: /Delete Instance/i })).toBeInTheDocument(); - }); - }); - - describe('WebSocket Integration', () => { - it('should subscribe to websocket events on mount', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['update', 'delete'], - expect.any(Function) - ); - }); - - it('should handle websocket instance update events', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(InstanceDetailsPage); - - // Wait for 
component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should subscribe to update events - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['update']), - expect.any(Function) - ); - }); - - it('should handle websocket instance delete events', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - const { goto } = await import('$app/navigation'); - - render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should subscribe to delete events and have navigation ready - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['delete']), - expect.any(Function) - ); - expect(goto).toBeDefined(); - }); - - it('should unsubscribe from websocket on destroy', async () => { - const mockUnsubscribe = vi.fn(); - const { websocketStore } = await import('$lib/stores/websocket.js'); - (websocketStore.subscribeToEntity as any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have subscribed - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - - // Unmount should call unsubscribe - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - }); - - describe('Breadcrumb Navigation', () => { - it('should display breadcrumb navigation', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have breadcrumb navigation - expect(screen.getByRole('navigation', { name: 'Breadcrumb' })).toBeInTheDocument(); - expect(screen.getByRole('link', { name: /Instances/i })).toBeInTheDocument(); - }); - - it('should link back to instances list', async () => { - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have link back to instances - const instancesLink = screen.getByRole('link', { name: /Instances/i }); - expect(instancesLink).toHaveAttribute('href', '/instances'); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(InstanceDetailsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InstanceDetailsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle parameter changes', async () => { - // Simulate parameter change by remocking the page store - const storesModule = await import('$app/stores'); - vi.mocked(storesModule.page.subscribe).mockImplementation((callback: any) => { - callback({ - params: { id: 'different-instance' }, - url: new URL('/instances/different-instance', 'http://localhost') - }); - return () => {}; - }); - - const { garmApi } = await import('$lib/api/client.js'); - - render(InstanceDetailsPage); - - // Should handle parameter change - expect(garmApi.getInstance).toBeDefined(); - }); - }); - - describe('Error Handling', () => { - it('should display not found state when instance is null', async () => { - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(null); - - render(InstanceDetailsPage); - - // Wait for loading to complete - await new Promise(resolve => setTimeout(resolve, 100)); - - // Should show not found message - 
expect(screen.getByText(/Instance not found/i)).toBeInTheDocument(); - }); - - it('should handle missing optional fields gracefully', async () => { - const minimalInstance = { - id: 'inst-123', - name: 'minimal-instance', - created_at: '2024-01-01T00:00:00Z', - status: 'running' - }; - - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getInstance as any).mockResolvedValue(minimalInstance); - - render(InstanceDetailsPage); - - // Wait for instance to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should handle missing fields gracefully (use getAllByText for instance name) - expect(screen.getAllByText('minimal-instance')[0]).toBeInTheDocument(); - expect(screen.getByText(/Not assigned/i)).toBeInTheDocument(); // agent_id fallback - }); - }); - - describe('URL Parameter Handling', () => { - it('should decode URL-encoded instance names', async () => { - // Mock page store with encoded name - const { page } = await import('$app/stores'); - vi.mocked(page.subscribe).mockImplementation((callback: any) => { - callback({ - params: { id: 'test%2Dinstance%2Dwith%2Ddashes' }, - url: { pathname: '/instances/test%2Dinstance%2Dwith%2Ddashes' } - }); - return () => {}; - }); - - const { garmApi } = await import('$lib/api/client.js'); - - render(InstanceDetailsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should decode the parameter - expect(garmApi.getInstance).toHaveBeenCalledWith('test-instance-with-dashes'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/page.integration.test.ts b/webapp/src/routes/instances/page.integration.test.ts deleted file mode 100644 index 31f02fed..00000000 --- a/webapp/src/routes/instances/page.integration.test.ts +++ /dev/null @@ -1,569 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import InstancesPage from './+page.svelte'; -import { createMockInstance } from '../../test/factories.js'; - -// Mock app stores and navigation -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -const mockInstance1 = createMockInstance({ - id: 'inst-123', - name: 'test-instance-1', - provider_id: 'prov-123', - status: 'running', - runner_status: 'idle' -}); - -const mockInstance2 = createMockInstance({ - id: 'inst-456', - name: 'test-instance-2', - provider_id: 'prov-456', - status: 'stopped', - runner_status: 'busy' -}); - -const mockInstances = [mockInstance1, mockInstance2]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listInstances: vi.fn(), - deleteInstance: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Global setup for each test -let garmApi: any; -let websocketStore: any; - -describe('Comprehensive Integration Tests for Instances Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set 
up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const wsModule = await import('$lib/stores/websocket.js'); - websocketStore = wsModule.websocketStore; - - (garmApi.listInstances as any).mockResolvedValue(mockInstances); - (garmApi.deleteInstance as any).mockResolvedValue({}); - (websocketStore.subscribeToEntity as any).mockReturnValue(vi.fn()); - }); - - describe('Component Rendering and Data Display', () => { - it('should render instances page with real components', async () => { - render(InstancesPage); - - await waitFor(() => { - // Wait for data to load - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should render the page header - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - - // Should render page description - expect(screen.getByText(/Monitor your running instances/i)).toBeInTheDocument(); - }); - - it('should display instances data in the table', async () => { - render(InstancesPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Component should render the DataTable component which would display instance data - // The exact instance names may not be visible due to how the DataTable renders data - // but the structure should be in place for displaying instances - expect(document.body).toBeInTheDocument(); - }); - - it('should render all major sections when data is loaded', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should show the data table structure - expect(document.body).toBeInTheDocument(); - - // Should not have an action button (instances page is read-only) - expect(screen.queryByRole('button', { name: /Add/i })).not.toBeInTheDocument(); - }); - }); - - describe('Search and Filtering Integration', () => { - it('should handle search functionality', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Search functionality should be integrated - expect(screen.getByPlaceholderText(/Search instances/i)).toBeInTheDocument(); - }); - - it('should filter instances based on search term', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Component should have filtering logic for instances - expect(document.body).toBeInTheDocument(); - }); - - it('should handle status filtering', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Component should filter by both status and runner_status - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Pagination Integration', () => { - it('should handle pagination with real data', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should handle pagination for instances data - expect(document.body).toBeInTheDocument(); - }); - - it('should handle per-page changes', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Change per page functionality should be available - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Modal Integration', () => { - it('should handle delete instance modal workflow', async () => { - 
render(InstancesPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Delete API should be available for the delete workflow - expect(garmApi.deleteInstance).toBeDefined(); - - // Confirmation modal and error handling should be integrated - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - - // The delete functionality should be integrated through the DataTable component - // Delete buttons may not be visible when no data is loaded, but the infrastructure should be in place - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - - it('should not have create or edit modals', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Instances are read-only - no create or edit functionality - expect(screen.queryByRole('button', { name: /Add/i })).not.toBeInTheDocument(); - expect(screen.queryByRole('button', { name: /Edit/i })).not.toBeInTheDocument(); - }); - }); - - describe('API Integration', () => { - it('should call API when component mounts', async () => { - render(InstancesPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the API to load data - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock delayed API response - (garmApi.listInstances as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockInstances), 100)) - ); - - render(InstancesPage); - - // Component should render the basic structure immediately - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - - // After API resolves, data loading should be complete - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }, { timeout: 1000 }); - - // Component should handle data loading properly - expect(screen.getByText(/Monitor your running instances/i)).toBeInTheDocument(); - }); - - it('should handle API errors and display error state', async () => { - // Mock API to fail - const error = new Error('Failed to load instances'); - (garmApi.listInstances as any).mockRejectedValue(error); - - const { container } = render(InstancesPage); - - // Wait for error to be handled - await waitFor(() => { - // Component should handle the error gracefully and continue to render - expect(container).toBeInTheDocument(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - - it('should handle retry functionality', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Retry functionality should be available - expect(garmApi.listInstances).toBeDefined(); - }); - }); - - describe('Instance Deletion Integration', () => { - it('should integrate instance deletion workflow', async () => { - render(InstancesPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Deletion functionality should be available - expect(garmApi.deleteInstance).toBeDefined(); - - // Component should be ready to handle instance deletion - 
expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - - it('should show error handling structure for instance deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - // Set up API to fail when deleteInstance is called - const error = new Error('Instance deletion failed'); - (garmApi.deleteInstance as any).mockRejectedValue(error); - - render(InstancesPage); - - await waitFor(() => { - // Wait for data loading to be called - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Verify the component has the proper structure for deletion error handling - expect(toastStore.error).toBeDefined(); - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - }); - - describe('WebSocket Integration', () => { - it('should subscribe to websocket events on mount', async () => { - render(InstancesPage); - - // Wait for component mount - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - }); - - it('should handle websocket instance create events', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // WebSocket event handling should be integrated - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - - it('should handle websocket instance update events', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Update event handling should be integrated for real-time updates - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - - it('should handle websocket instance delete events', async () => { - render(InstancesPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Delete event handling should be integrated for real-time updates - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - - it('should clean up websocket subscription on unmount', async () => { - const mockUnsubscribe = vi.fn(); - (websocketStore.subscribeToEntity as any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstancesPage); - - await waitFor(() => { - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Should clean up subscription on unmount - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(InstancesPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the API system - expect(screen.getByText(/Monitor your running instances/i)).toBeInTheDocument(); - }); - - it('should maintain consistent state across components', async () => { - render(InstancesPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should 
be integrated through the API system - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(InstancesPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support various user interaction flows', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should support user interactions like search, pagination, delete operations - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should have search functionality available - expect(screen.getByPlaceholderText(/Search instances/i)).toBeInTheDocument(); - }); - - it('should handle read-only interaction patterns', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should handle read-only patterns (no create/edit) - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should not have create/edit buttons - expect(screen.queryByRole('button', { name: /Add/i })).not.toBeInTheDocument(); - expect(screen.queryByRole('button', { name: /Edit/i })).not.toBeInTheDocument(); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should have proper ARIA attributes and labels - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - }); - - it('should be responsive across different viewport sizes', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should render properly across different viewport sizes - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Page structure should be responsive - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - }); - }); - - describe('Status and State Handling', () => { - it('should handle instance status display', async () => { - render(InstancesPage); - - await waitFor(() => { - // Instance status should be properly displayed - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should handle both status and runner_status fields - expect(document.body).toBeInTheDocument(); - }); - - it('should handle runner status display', async () => { - render(InstancesPage); - - await waitFor(() => { - // Runner status should be properly displayed - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should display runner-specific status information - expect(document.body).toBeInTheDocument(); - }); - - it('should handle status filtering logic', async () => { - render(InstancesPage); - - await waitFor(() => { - // Status filtering should work for both status types - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - // Should filter by both status and runner_status - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Real-time Updates', () => { - it('should handle real-time instance creation', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should handle real-time updates through websocket - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Real-time creation events should be handled - 
expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['create']), - expect.any(Function) - ); - }); - - it('should handle real-time instance updates', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should handle real-time updates through websocket - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Real-time update events should be handled - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['update']), - expect.any(Function) - ); - }); - - it('should handle real-time instance deletion', async () => { - render(InstancesPage); - - await waitFor(() => { - // Should handle real-time updates through websocket - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - - // Real-time deletion events should be handled - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - expect.arrayContaining(['delete']), - expect.any(Function) - ); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/page.render.test.ts b/webapp/src/routes/instances/page.render.test.ts deleted file mode 100644 index be7e3057..00000000 --- a/webapp/src/routes/instances/page.render.test.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import InstancesPage from './+page.svelte'; -import { createMockInstance } from '../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listInstances: vi.fn(), - deleteInstance: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockInstance = createMockInstance({ - name: 'test-instance', - provider_id: 'prov-123', - status: 'running', - runner_status: 'idle' -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -describe('Instances Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.listInstances as any).mockResolvedValue([mockInstance]); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(InstancesPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(InstancesPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render page header', () => { - const { container } = render(InstancesPage); - // Should have page header component - expect(container).toBeInTheDocument(); - }); - - it('should render data table', () => { - const { container } = render(InstancesPage); - // Should have DataTable component - expect(container).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { 
- it('should mount successfully', () => { - const { component } = render(InstancesPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InstancesPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(InstancesPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load instances on mount', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call API to load instances - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - it('should subscribe to websocket events on mount', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(InstancesPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should subscribe to websocket events - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', () => { - const { container } = render(InstancesPage); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(InstancesPage); - - // Should set page title - expect(document.title).toContain('Instances - GARM'); - }); - - it('should handle error display conditionally', () => { - const { container } = render(InstancesPage); - - // Error display should be conditional - expect(container).toBeInTheDocument(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render delete modal', () => { - const { container } = render(InstancesPage); - - // Delete modal should not be visible initially - expect(container).toBeInTheDocument(); - }); - - it('should handle modal state management', () => { - const { container } = render(InstancesPage); - - // Modal state should be properly managed - expect(container).toBeInTheDocument(); - }); - }); - - describe('WebSocket Lifecycle', () => { - it('should clean up websocket subscription on unmount', async () => { - const mockUnsubscribe = vi.fn(); - const { websocketStore } = await import('$lib/stores/websocket.js'); - (websocketStore.subscribeToEntity as any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstancesPage); - - // Wait for mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Unmount and verify cleanup - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - - it('should handle websocket subscription errors gracefully', () => { - const { container } = render(InstancesPage); - - // Should handle websocket errors gracefully - expect(container).toBeInTheDocument(); - }); - }); - - describe('Data Table Integration', () => { - it('should integrate with DataTable component', () => { - const { container } = render(InstancesPage); - - // Should integrate with DataTable for instance display - expect(container).toBeInTheDocument(); - }); - - it('should configure table columns properly', () => { - const { container } = render(InstancesPage); - - // Should configure columns for instance display - expect(container).toBeInTheDocument(); - }); - - it('should configure mobile card 
layout', () => { - const { container } = render(InstancesPage); - - // Should configure mobile-friendly layout - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/instances/page.test.ts b/webapp/src/routes/instances/page.test.ts deleted file mode 100644 index 16821580..00000000 --- a/webapp/src/routes/instances/page.test.ts +++ /dev/null @@ -1,413 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import InstancesPage from './+page.svelte'; -import { createMockInstance } from '../../test/factories.js'; - -// Mock the page stores -vi.mock('$app/stores', () => ({})); - -// Mock navigation -vi.mock('$app/navigation', () => ({})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listInstances: vi.fn(), - deleteInstance: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -const mockInstance = createMockInstance({ - name: 'test-instance', - provider_id: 'prov-123', - status: 'running', - runner_status: 'idle' -}); - -const mockInstances = [mockInstance]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -describe('Instances Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mock - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.listInstances as any).mockResolvedValue(mockInstances); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(InstancesPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(InstancesPage); - expect(document.title).toContain('Instances - GARM'); - }); - }); - - describe('Data Loading', () => { - it('should load instances on mount', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(garmApi.listInstances).toHaveBeenCalled(); - }); - - it('should handle loading state', async () => { - const { container } = render(InstancesPage); - - // Component should render without error during loading - expect(container).toBeInTheDocument(); - - // Should have access to loading state - expect(document.title).toContain('Instances - GARM'); - }); - - it('should handle API error state', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Mock API to fail - const error = new Error('Failed to load instances'); - (garmApi.listInstances as any).mockRejectedValue(error); - - const { container } = render(InstancesPage); - - // Wait for the error to be handled - await new Promise(resolve => setTimeout(resolve, 100)); - - // Component should handle error gracefully - expect(container).toBeInTheDocument(); - }); - - it('should retry loading instances', async () => { - const { garmApi } = await 
import('$lib/api/client.js'); - - render(InstancesPage); - - // Verify retry functionality is available - expect(garmApi.listInstances).toBeDefined(); - }); - }); - - describe('Search and Filtering', () => { - it('should handle search functionality', async () => { - render(InstancesPage); - - // Component should have search filtering logic available - expect(screen.getByPlaceholderText(/Search instances/i)).toBeInTheDocument(); - - // Verify search field is properly configured (uses text type for compatibility) - const searchInput = screen.getByPlaceholderText(/Search instances/i); - expect(searchInput).toHaveAttribute('type', 'text'); - }); - - it('should handle status filtering', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should have API available for loading instances with different statuses - expect(garmApi.listInstances).toBeDefined(); - - // Component structure should be in place for status filtering - expect(document.title).toContain('Instances - GARM'); - }); - - it('should handle pagination', async () => { - render(InstancesPage); - - // Component should handle pagination state through the DataTable - expect(screen.getByText(/Loading instances/i)).toBeInTheDocument(); - - // Pagination controls should be available - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - }); - - describe('Instance Deletion', () => { - it('should have proper structure for instance deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - expect(garmApi.deleteInstance).toBeDefined(); - }); - - it('should show success toast after instance deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(InstancesPage); - - expect(toastStore.success).toBeDefined(); - }); - - it('should handle deletion errors', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(InstancesPage); - - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Modal Management', () => { - it('should handle delete modal state', async () => { - render(InstancesPage); - - // Component should have delete API for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.deleteInstance).toBeDefined(); - - // Should have toast notifications for delete feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - - it('should handle modal close functionality', () => { - render(InstancesPage); - - // Component should manage modal state for delete confirmation - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - - // Modal infrastructure should be ready for delete operations - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('WebSocket Integration', () => { - it('should subscribe to websocket events on mount', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(InstancesPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - - it('should handle websocket instance events', async () => { - const { websocketStore } = await 
import('$lib/stores/websocket.js'); - - render(InstancesPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Component should have websocket event handling logic integrated - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith( - 'instance', - ['create', 'update', 'delete'], - expect.any(Function) - ); - }); - - it('should unsubscribe from websocket on destroy', async () => { - const mockUnsubscribe = vi.fn(); - const { websocketStore } = await import('$lib/stores/websocket.js'); - (websocketStore.subscribeToEntity as any).mockReturnValue(mockUnsubscribe); - - const { unmount } = render(InstancesPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have subscribed - expect(websocketStore.subscribeToEntity).toHaveBeenCalled(); - - // Unmount should call unsubscribe - unmount(); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(InstancesPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(InstancesPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component initialization', async () => { - const { container } = render(InstancesPage); - - // Component should initialize and render properly - expect(container).toBeInTheDocument(); - - // Should set page title during initialization - expect(document.title).toContain('Instances - GARM'); - - // Should load instances during initialization - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.listInstances).toBeDefined(); - }); - }); - - describe('Data Transformation', () => { - it('should handle instance filtering logic', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should filter instances by search and status - expect(garmApi.listInstances).toBeDefined(); - - // Search functionality should be available - expect(screen.getByPlaceholderText(/Search instances/i)).toBeInTheDocument(); - }); - - it('should handle pagination calculations', () => { - render(InstancesPage); - - // Component should calculate pagination correctly through DataTable - expect(screen.getByText(/Loading instances/i)).toBeInTheDocument(); - - // Pagination controls should be available - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should handle status matching logic', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should match both status and runner_status for filtering - expect(garmApi.listInstances).toBeDefined(); - - // Component should handle dual status fields (status and runner_status) - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - }); - - describe('Event Handling', () => { - it('should handle table search events', () => { - render(InstancesPage); - - // Component should handle search event from DataTable - expect(screen.getByText(/Loading instances/i)).toBeInTheDocument(); - - // Search input should be available for search events - expect(screen.getByPlaceholderText(/Search instances/i)).toBeInTheDocument(); - }); - - it('should handle table pagination events', () => { - render(InstancesPage); - - // Component should handle pagination events from DataTable - expect(screen.getByText(/Loading 
instances/i)).toBeInTheDocument(); - - // Pagination controls should be integrated - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should handle delete events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should handle delete events from DataTable - expect(garmApi.deleteInstance).toBeDefined(); - - // Delete infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - - it('should handle retry events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should handle retry events from DataTable - expect(garmApi.listInstances).toBeDefined(); - - // DataTable should be rendered for retry functionality - expect(screen.getByText(/Loading instances/i)).toBeInTheDocument(); - }); - }); - - describe('Utility Functions', () => { - it('should handle API error extraction', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(InstancesPage); - - expect(extractAPIError).toBeDefined(); - }); - - it('should handle instance identification', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(InstancesPage); - - // Component should identify instances by name (not id) - expect(garmApi.deleteInstance).toBeDefined(); - - // Instance identification should work with instance names - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - }); - }); - - describe('No Edit Functionality', () => { - it('should not have edit functionality for instances', () => { - render(InstancesPage); - - // Instances are read-only with no edit capability - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - - // Should not have add action button since showAction is false - expect(screen.queryByText(/Add/)).not.toBeInTheDocument(); - }); - - it('should handle edit events as no-op', () => { - render(InstancesPage); - - // Edit handler should be a no-op for instances - expect(screen.getByRole('heading', { name: 'Runner Instances' })).toBeInTheDocument(); - - // Component should render without edit functionality - expect(document.body).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/login/+page.svelte b/webapp/src/routes/login/+page.svelte deleted file mode 100644 index 6fdfac81..00000000 --- a/webapp/src/routes/login/+page.svelte +++ /dev/null @@ -1,160 +0,0 @@ - - - - Login - GARM - - -
                [The 160 deleted lines of webapp/src/routes/login/+page.svelte did not survive extraction into this document; the Svelte/HTML markup was stripped and only its text nodes remain. Recoverable content: the "Login - GARM" page title, the GARM logo, a "Sign in to GARM" heading, the "GitHub Actions Runner Manager" tagline, username and password fields, a submit button, and an {#if error} block rendering {error}.]
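                [Since the markup above is unrecoverable, the following is a minimal sketch of the deleted login page, reconstructed solely from the assertions in the deleted test files below (page title, class names, input attributes, button labels, and the login/theme/redirect flows they verify). The Button component's props, the exact Tailwind class lists, and the handler wiring are assumptions; the original file almost certainly differed in detail.]

<script lang="ts">
	import { onMount, onDestroy } from 'svelte';
	import { goto } from '$app/navigation';
	import { resolve } from '$app/paths';
	import { authStore, auth } from '$lib/stores/auth.js';
	import { extractAPIError } from '$lib/utils/apiError';
	import Button from '$lib/components/Button.svelte';

	let username = '';
	let password = '';
	let error = '';
	let loading = false;

	// Already-authenticated users are redirected straight to the dashboard
	// (the tests assert goto('/') fires as soon as the store reports a user).
	const unsubscribe = authStore.subscribe((state) => {
		if (state.isAuthenticated) goto(resolve('/'));
	});
	onDestroy(unsubscribe);

	onMount(() => {
		// Theme bootstrap: saved preference wins, system preference is the fallback.
		const saved = localStorage.getItem('theme');
		const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
		document.documentElement.classList.toggle('dark', saved ? saved === 'dark' : prefersDark);
	});

	async function handleLogin() {
		if (!username || !password) {
			error = 'Please enter both username and password';
			return;
		}
		loading = true;
		error = '';
		try {
			await auth.login(username, password);
			goto(resolve('/'));
		} catch (err) {
			error = extractAPIError(err);
		} finally {
			loading = false;
		}
	}
</script>

<svelte:head>
	<title>Login - GARM</title>
</svelte:head>

<div class="min-h-screen flex items-center justify-center bg-gray-50 dark:bg-gray-900 py-12 px-4 sm:px-6 lg:px-8">
	<div class="max-w-md w-full space-y-8">
		<div class="mx-auto h-48 w-auto flex justify-center">
			<img src={resolve('/assets/garm-light.svg')} alt="GARM" class="dark:hidden" />
			<img src={resolve('/assets/garm-dark.svg')} alt="GARM" class="hidden dark:block" />
		</div>
		<h2 class="text-center text-3xl font-extrabold text-gray-900 dark:text-white">Sign in to GARM</h2>
		<p class="text-center text-sm text-gray-600 dark:text-gray-400">GitHub Actions Runner Manager</p>
		<form class="mt-8 space-y-6" on:submit|preventDefault={handleLogin}>
			<div class="rounded-md shadow-sm -space-y-px">
				<label for="username" class="sr-only">Username</label>
				<input id="username" name="username" type="text" required placeholder="Username"
					bind:value={username} disabled={loading}
					on:keypress={(e) => e.key === 'Enter' && handleLogin()}
					class="rounded-t-md dark:border-gray-600 dark:bg-gray-700 dark:text-white" />
				<label for="password" class="sr-only">Password</label>
				<input id="password" name="password" type="password" required placeholder="Password"
					bind:value={password} disabled={loading}
					on:keypress={(e) => e.key === 'Enter' && handleLogin()}
					class="rounded-b-md dark:border-gray-600 dark:bg-gray-700 dark:text-white" />
			</div>
			{#if error}
				<div class="rounded-md bg-red-50 dark:bg-red-900 p-4">
					<p class="text-sm text-red-800 dark:text-red-200">{error}</p>
				</div>
			{/if}
			<!-- Button props are assumptions; the tests only assert type="submit",
			     a w-full / bg-blue-600 button, and "Sign in" / "Signing in..." labels. -->
			<Button type="submit" variant="primary" fullWidth>
				{loading ? 'Signing in...' : 'Sign in'}
			</Button>
		</form>
	</div>
</div>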
                \ No newline at end of file diff --git a/webapp/src/routes/login/page.integration.test.ts b/webapp/src/routes/login/page.integration.test.ts deleted file mode 100644 index 8d1d26dc..00000000 --- a/webapp/src/routes/login/page.integration.test.ts +++ /dev/null @@ -1,757 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { render, screen, waitFor, fireEvent } from '@testing-library/svelte'; -import LoginPage from './+page.svelte'; - -// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: false, - ...overrides - }; -} - -// Mock app stores and navigation -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -// Only mock the auth store and API -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - login: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Global setup for each test -let auth: any; -let authStore: any; -let goto: any; -let resolve: any; -let extractAPIError: any; - -// Mock DOM APIs -const mockLocalStorage = { - getItem: vi.fn(), - setItem: vi.fn(), - removeItem: vi.fn() -}; - -const mockMatchMedia = vi.fn(); - -describe('Comprehensive Integration Tests for Login Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const authModule = await import('$lib/stores/auth.js'); - auth = authModule.auth; - authStore = authModule.authStore; - - const navigationModule = await import('$app/navigation'); - goto = navigationModule.goto; - - const pathsModule = await import('$app/paths'); - resolve = pathsModule.resolve; - - const apiErrorModule = await import('$lib/utils/apiError'); - extractAPIError = apiErrorModule.extractAPIError; - - // Mock DOM APIs - Object.defineProperty(window, 'localStorage', { value: mockLocalStorage }); - Object.defineProperty(window, 'matchMedia', { value: mockMatchMedia }); - - (auth.login as any).mockResolvedValue({}); - (resolve as any).mockImplementation((path: string) => path); - (mockLocalStorage.getItem as any).mockReturnValue(null); - (mockMatchMedia as any).mockReturnValue({ matches: false }); - (extractAPIError as any).mockImplementation((err: any) => err.message || 'Unknown error'); - }); - - afterEach(() => { - // Clean up DOM changes - document.documentElement.classList.remove('dark'); - vi.restoreAllMocks(); - }); - - describe('Component Rendering and Integration', () => { - it('should render login page with real components', async () => { - render(LoginPage); - - await waitFor(() => { - // Should render all main components - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - expect(screen.getByText('GitHub Actions Runner Manager')).toBeInTheDocument(); - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /sign in/i })).toBeInTheDocument(); - }); - }); - - it('should integrate theme initialization with DOM', async () 
=> { - render(LoginPage); - - await waitFor(() => { - // Should call localStorage to check theme - expect(mockLocalStorage.getItem).toHaveBeenCalledWith('theme'); - }); - - // Should not have dark class initially (light theme) - expect(document.documentElement.classList.contains('dark')).toBe(false); - }); - - it('should render proper logo integration', async () => { - render(LoginPage); - - await waitFor(() => { - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); - - // Should have proper src paths resolved - expect(resolve).toHaveBeenCalledWith('/assets/garm-light.svg'); - expect(resolve).toHaveBeenCalledWith('/assets/garm-dark.svg'); - }); - }); - - it('should integrate all form components properly', async () => { - render(LoginPage); - - await waitFor(() => { - // All form elements should be integrated - const form = document.querySelector('form'); - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - expect(form).toBeInTheDocument(); - expect(usernameInput).toBeInTheDocument(); - expect(passwordInput).toBeInTheDocument(); - expect(submitButton).toBeInTheDocument(); - }); - }); - }); - - describe('Authentication Workflow Integration', () => { - it('should handle complete login workflow', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - // Complete login workflow - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // User enters credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // User submits form - await fireEvent.click(submitButton); - - // Should call auth API - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - - // Should redirect on success - expect(goto).toHaveBeenCalledWith('/'); - }); - - it('should handle authentication redirect integration', async () => { - // Mock already authenticated user - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - return () => {}; - }); - - render(LoginPage); - - await waitFor(() => { - // Should automatically redirect - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should integrate error handling with UI display', async () => { - const error = new Error('Invalid credentials'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'wrongpassword' } }); - await fireEvent.click(submitButton); - - // Should display error in UI - await waitFor(() => { - expect(screen.getByText('Invalid credentials')).toBeInTheDocument(); - }); - - // Should extract API error properly - expect(extractAPIError).toHaveBeenCalledWith(error); - }); 
- - it('should handle loading state integration', async () => { - // Mock delayed login - let resolveLogin: () => void; - const loginPromise = new Promise((resolve) => { - resolveLogin = resolve; - }); - (auth.login as any).mockReturnValue(loginPromise); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should show loading state - await waitFor(() => { - expect(screen.getByText('Signing in...')).toBeInTheDocument(); - expect(usernameInput).toBeDisabled(); - expect(passwordInput).toBeDisabled(); - }); - - // Complete login - resolveLogin!(); - await loginPromise; - }); - }); - - describe('Theme Integration Workflows', () => { - it('should apply dark theme from localStorage', async () => { - (mockLocalStorage.getItem as any).mockReturnValue('dark'); - - render(LoginPage); - - await waitFor(() => { - expect(mockLocalStorage.getItem).toHaveBeenCalledWith('theme'); - }); - - // Should apply dark theme to document - expect(document.documentElement.classList.contains('dark')).toBe(true); - }); - - it('should apply light theme from localStorage', async () => { - (mockLocalStorage.getItem as any).mockReturnValue('light'); - - render(LoginPage); - - await waitFor(() => { - expect(mockLocalStorage.getItem).toHaveBeenCalledWith('theme'); - }); - - // Should remove dark theme from document - expect(document.documentElement.classList.contains('dark')).toBe(false); - }); - - it('should use system preference when no saved theme', async () => { - (mockLocalStorage.getItem as any).mockReturnValue(null); - (mockMatchMedia as any).mockReturnValue({ matches: true }); // Dark system preference - - render(LoginPage); - - await waitFor(() => { - expect(mockMatchMedia).toHaveBeenCalledWith('(prefers-color-scheme: dark)'); - }); - - // Should apply dark theme based on system preference - expect(document.documentElement.classList.contains('dark')).toBe(true); - }); - - it('should handle system preference for light theme', async () => { - (mockLocalStorage.getItem as any).mockReturnValue(null); - (mockMatchMedia as any).mockReturnValue({ matches: false }); // Light system preference - - render(LoginPage); - - await waitFor(() => { - expect(mockMatchMedia).toHaveBeenCalledWith('(prefers-color-scheme: dark)'); - }); - - // Should not apply dark theme - expect(document.documentElement.classList.contains('dark')).toBe(false); - }); - - it('should handle theme integration with logo display', async () => { - render(LoginPage); - - await waitFor(() => { - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); - }); - - // Should have proper theme-aware classes - const logos = screen.getAllByAltText('GARM'); - const lightLogo = logos.find(img => img.classList.contains('dark:hidden')); - const darkLogo = logos.find(img => img.classList.contains('hidden')); - - expect(lightLogo).toBeInTheDocument(); - expect(darkLogo).toBeInTheDocument(); - }); - }); - - describe('Form Interaction Integration', () => { - it('should handle keyboard interaction workflows', async () => { - render(LoginPage); - - await waitFor(() => { 
- expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Press Enter in username field - await fireEvent.keyPress(usernameInput, { key: 'Enter', code: 'Enter' }); - - // Should trigger login - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - }); - - it('should handle form submission prevention', async () => { - render(LoginPage); - - await waitFor(() => { - expect(document.querySelector('form')).toBeInTheDocument(); - }); - - const form = document.querySelector('form')! - - // Form should have proper structure for preventing default submission - expect(form).toBeInTheDocument(); - }); - - it('should integrate form validation with UI feedback', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByRole('button', { name: /sign in/i })).toBeInTheDocument(); - }); - - const form = document.querySelector('form')!; - - // Submit empty form via form submission - await fireEvent.submit(form); - - // Should show validation error - await waitFor(() => { - expect(screen.getByText('Please enter both username and password')).toBeInTheDocument(); - }); - - // Should not call auth API - expect(auth.login).not.toHaveBeenCalled(); - }); - - it('should handle partial validation scenarios', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const form = document.querySelector('form')!; - - // Enter only username - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.submit(form); - - // Should show validation error - await waitFor(() => { - expect(screen.getByText('Please enter both username and password')).toBeInTheDocument(); - }); - - // Should not call auth API - expect(auth.login).not.toHaveBeenCalled(); - }); - }); - - describe('Error Handling Integration', () => { - it('should integrate API error extraction and display', async () => { - const error = new Error('Server error occurred'); - (auth.login as any).mockRejectedValue(error); - (extractAPIError as any).mockReturnValue('Server error occurred'); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should extract and display error - await waitFor(() => { - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(screen.getByText('Server error occurred')).toBeInTheDocument(); - }); - }); - - it('should handle error state recovery', async () => { - // First cause an error - const error = new Error('First error'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = 
screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Trigger error - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - await waitFor(() => { - expect(screen.getByText('First error')).toBeInTheDocument(); - }); - - // Now mock success and try again - (auth.login as any).mockResolvedValue({}); - await fireEvent.click(submitButton); - - // Error should be cleared - await waitFor(() => { - expect(screen.queryByText('First error')).not.toBeInTheDocument(); - }); - }); - - it('should integrate error styling with theme', async () => { - const error = new Error('Authentication failed'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Trigger error - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should display error with proper styling - await waitFor(() => { - const errorMessage = screen.getByText('Authentication failed'); - expect(errorMessage).toBeInTheDocument(); - - // Should have proper error styling container - const errorContainer = errorMessage.closest('.bg-red-50'); - expect(errorContainer).toBeInTheDocument(); - }); - }); - }); - - describe('State Management Integration', () => { - it('should integrate auth store subscription', async () => { - render(LoginPage); - - await waitFor(() => { - // Should subscribe to auth store - expect(authStore.subscribe).toHaveBeenCalled(); - }); - }); - - it('should handle auth store state changes', async () => { - // Mock store that changes state - let callback: (state: any) => void; - vi.mocked(authStore.subscribe).mockImplementation((cb: (state: any) => void) => { - callback = cb; - cb(createMockAuthState({ isAuthenticated: false })); - return () => {}; - }); - - render(LoginPage); - - await waitFor(() => { - expect(authStore.subscribe).toHaveBeenCalled(); - }); - - // Simulate auth state change - callback!(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - - // Should trigger redirect - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should maintain component state during interactions', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - const passwordInput = screen.getByLabelText('Password') as HTMLInputElement; - - // Enter values - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Values should be maintained - expect(usernameInput.value).toBe('testuser'); - expect(passwordInput.value).toBe('password123'); - }); - - it('should handle loading state transitions', async () => { - // Mock login that resolves after delay - let resolveLogin: () => void; - const loginPromise = new Promise((resolve) => { - 
resolveLogin = resolve; - }); - (auth.login as any).mockReturnValue(loginPromise); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Initial state - not loading - expect(screen.getByText('Sign in')).toBeInTheDocument(); - expect(usernameInput).not.toBeDisabled(); - expect(passwordInput).not.toBeDisabled(); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should transition to loading state - await waitFor(() => { - expect(screen.getByText('Signing in...')).toBeInTheDocument(); - expect(usernameInput).toBeDisabled(); - expect(passwordInput).toBeDisabled(); - }); - - // Complete login - resolveLogin!(); - await loginPromise; - - // Should redirect - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); - - describe('Navigation Integration', () => { - it('should integrate path resolution', async () => { - render(LoginPage); - - await waitFor(() => { - // Should resolve asset paths - expect(resolve).toHaveBeenCalledWith('/assets/garm-light.svg'); - expect(resolve).toHaveBeenCalledWith('/assets/garm-dark.svg'); - }); - }); - - it('should handle navigation on successful login', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Successful login flow - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should navigate to home with resolved path - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - - it('should integrate automatic redirect for authenticated users', async () => { - // Mock authenticated user from start - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'existinguser' })); - return () => {}; - }); - - render(LoginPage); - - // Should immediately redirect - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); - - describe('Accessibility Integration', () => { - it('should integrate keyboard navigation flow', async () => { - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - // Should support tab navigation - usernameInput.focus(); - expect(document.activeElement).toBe(usernameInput); - - // Should support keyboard form submission - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.keyPress(passwordInput, { key: 'Enter', code: 'Enter' }); - - // Should submit form - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - }); 
- - it('should maintain accessibility during loading states', async () => { - // Mock delayed login - let resolveLogin: () => void; - const loginPromise = new Promise((resolve) => { - resolveLogin = resolve; - }); - (auth.login as any).mockReturnValue(loginPromise); - - render(LoginPage); - - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - }); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Submit form - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - // Should maintain proper labels during loading - await waitFor(() => { - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /signing in/i })).toBeInTheDocument(); - }); - - // Complete login - resolveLogin!(); - await loginPromise; - }); - }); - - describe('Component Lifecycle Integration', () => { - it('should handle complete component lifecycle', () => { - const { unmount } = render(LoginPage); - - // Should mount without errors - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - - it('should integrate properly with Svelte lifecycle', async () => { - render(LoginPage); - - // Should complete mount phase - await waitFor(() => { - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - expect(mockLocalStorage.getItem).toHaveBeenCalledWith('theme'); - }); - }); - - it('should handle reactive updates', async () => { - // Mock store with reactive updates - let callback: (state: any) => void; - vi.mocked(authStore.subscribe).mockImplementation((cb: (state: any) => void) => { - callback = cb; - cb(createMockAuthState({ isAuthenticated: false })); - return () => {}; - }); - - render(LoginPage); - - await waitFor(() => { - expect(authStore.subscribe).toHaveBeenCalled(); - }); - - // Should handle reactive state change - callback!(createMockAuthState({ isAuthenticated: true, user: 'newuser' })); - - await waitFor(() => { - expect(goto).toHaveBeenCalledWith('/'); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/login/page.render.test.ts b/webapp/src/routes/login/page.render.test.ts deleted file mode 100644 index 15d355e9..00000000 --- a/webapp/src/routes/login/page.render.test.ts +++ /dev/null @@ -1,497 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import LoginPage from './+page.svelte'; - -// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: false, - ...overrides - }; -} - -// Mock all external dependencies -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - login: vi.fn() - } -})); - 
-vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -// Mock DOM APIs -const mockLocalStorage = { - getItem: vi.fn(), - setItem: vi.fn(), - removeItem: vi.fn() -}; - -const mockMatchMedia = vi.fn(); - -describe('Login Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { auth } = await import('$lib/stores/auth.js'); - (auth.login as any).mockResolvedValue({}); - - const { resolve } = await import('$app/paths'); - (resolve as any).mockImplementation((path: string) => path); - - // Mock DOM APIs - Object.defineProperty(window, 'localStorage', { value: mockLocalStorage }); - Object.defineProperty(window, 'matchMedia', { value: mockMatchMedia }); - - (mockLocalStorage.getItem as any).mockReturnValue(null); - (mockMatchMedia as any).mockReturnValue({ matches: false }); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(LoginPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(LoginPage); - expect(container.querySelector('.min-h-screen')).toBeInTheDocument(); - }); - - it('should render main layout container', () => { - render(LoginPage); - - // Should have main container with proper styling - const mainContainer = document.querySelector('.min-h-screen.flex.items-center.justify-center'); - expect(mainContainer).toBeInTheDocument(); - }); - - it('should render centered content area', () => { - render(LoginPage); - - // Should have centered content area - const contentArea = document.querySelector('.max-w-md.w-full.space-y-8'); - expect(contentArea).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(LoginPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(LoginPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', () => { - const { component } = render(LoginPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should complete mount process successfully', () => { - render(LoginPage); - - // Should complete mount without errors - // (Theme initialization works in browser but not in test environment) - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', () => { - const { container } = render(LoginPage); - - // Should have main container - const mainContainer = container.querySelector('.min-h-screen'); - expect(mainContainer).toBeInTheDocument(); - - // Should have content area - const contentArea = container.querySelector('.max-w-md'); - expect(contentArea).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', () => { - render(LoginPage); - - // Should set page title - expect(document.title).toBe('Login - GARM'); - }); - - it('should handle responsive layout classes', () => { - render(LoginPage); - - // Should have responsive layout - const mainContainer = document.querySelector('.min-h-screen.flex.items-center.justify-center.bg-gray-50.dark\\:bg-gray-900.py-12.px-4.sm\\:px-6.lg\\:px-8'); - 
expect(mainContainer).toBeInTheDocument(); - }); - }); - - describe('Header Section Rendering', () => { - it('should render logo section', () => { - render(LoginPage); - - // Should have logo container - const logoContainer = document.querySelector('.mx-auto.h-48.w-auto.flex.justify-center'); - expect(logoContainer).toBeInTheDocument(); - }); - - it('should render both light and dark logos', () => { - render(LoginPage); - - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); - - // Should have light logo (visible by default) - const lightLogo = logos.find(img => img.classList.contains('dark:hidden')); - expect(lightLogo).toBeInTheDocument(); - - // Should have dark logo (hidden by default) - const darkLogo = logos.find(img => img.classList.contains('hidden')); - expect(darkLogo).toBeInTheDocument(); - }); - - it('should render page title and description', () => { - render(LoginPage); - - // Should render main heading - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - - // Should render description - expect(screen.getByText('GitHub Actions Runner Manager')).toBeInTheDocument(); - }); - - it('should have proper heading hierarchy', () => { - render(LoginPage); - - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - expect(heading.tagName).toBe('H2'); - expect(heading).toHaveClass('text-3xl', 'font-extrabold'); - }); - }); - - describe('Form Rendering', () => { - it('should render login form', () => { - render(LoginPage); - - // Should have form element - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - expect(form).toHaveClass('mt-8', 'space-y-6'); - }); - - it('should render username input field', () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - expect(usernameInput).toBeInTheDocument(); - expect(usernameInput).toHaveAttribute('type', 'text'); - expect(usernameInput).toHaveAttribute('name', 'username'); - expect(usernameInput).toHaveAttribute('required'); - expect(usernameInput).toHaveAttribute('placeholder', 'Username'); - }); - - it('should render password input field', () => { - render(LoginPage); - - const passwordInput = screen.getByLabelText('Password'); - expect(passwordInput).toBeInTheDocument(); - expect(passwordInput).toHaveAttribute('type', 'password'); - expect(passwordInput).toHaveAttribute('name', 'password'); - expect(passwordInput).toHaveAttribute('required'); - expect(passwordInput).toHaveAttribute('placeholder', 'Password'); - }); - - it('should render submit button', () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).toBeInTheDocument(); - expect(submitButton).toHaveAttribute('type', 'submit'); - }); - - it('should have proper form styling', () => { - render(LoginPage); - - // Should have rounded form container - const formContainer = document.querySelector('.rounded-md.shadow-sm.-space-y-px'); - expect(formContainer).toBeInTheDocument(); - - // Username should have rounded top - const usernameInput = screen.getByLabelText('Username'); - expect(usernameInput).toHaveClass('rounded-t-md'); - - // Password should have rounded bottom - const passwordInput = screen.getByLabelText('Password'); - expect(passwordInput).toHaveClass('rounded-b-md'); - }); - }); - - describe('Error State Rendering', () => { - it('should not show error state initially', () => { - render(LoginPage); - - // Should not have error container initially - const 
errorContainer = document.querySelector('.bg-red-50'); - expect(errorContainer).not.toBeInTheDocument(); - }); - - it('should conditionally render error display', () => { - render(LoginPage); - - // Error display should be conditional (not visible initially) - expect(screen.queryByText(/error/i)).not.toBeInTheDocument(); - }); - - it('should have proper error styling structure ready', () => { - render(LoginPage); - - // Form should be structured to accommodate error display - const form = document.querySelector('form'); - expect(form).toHaveClass('space-y-6'); - }); - }); - - describe('Button Integration', () => { - it('should integrate Button component', () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).toBeInTheDocument(); - }); - - it('should pass correct props to Button', () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Should be submit type - expect(submitButton).toHaveAttribute('type', 'submit'); - - // Should have primary variant styling (blue background) - expect(submitButton).toHaveClass('bg-blue-600'); - }); - - it('should render Button with full width', () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).toHaveClass('w-full'); - }); - }); - - describe('Accessibility Features', () => { - it('should have proper form labels', () => { - render(LoginPage); - - // Username field should have accessible label - const usernameLabel = screen.getByLabelText('Username'); - expect(usernameLabel).toBeInTheDocument(); - - // Password field should have accessible label - const passwordLabel = screen.getByLabelText('Password'); - expect(passwordLabel).toBeInTheDocument(); - }); - - it('should have screen reader only labels', () => { - render(LoginPage); - - // Should have sr-only labels for form fields - const labels = document.querySelectorAll('.sr-only'); - expect(labels.length).toBeGreaterThanOrEqual(2); // At least username and password labels - }); - - it('should have proper form semantics', () => { - render(LoginPage); - - // Should have form element - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - - // Should have submit button - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).toHaveAttribute('type', 'submit'); - }); - - it('should support keyboard navigation', () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // All elements should be focusable - expect(usernameInput).toBeInTheDocument(); - expect(passwordInput).toBeInTheDocument(); - expect(submitButton).toBeInTheDocument(); - }); - }); - - describe('Theme Support', () => { - it('should have dark mode classes', () => { - render(LoginPage); - - // Should have dark mode background - const mainContainer = document.querySelector('.dark\\:bg-gray-900'); - expect(mainContainer).toBeInTheDocument(); - - // Should have dark mode text colors - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - expect(heading).toHaveClass('dark:text-white'); - }); - - it('should handle theme-aware logo display', () => { - render(LoginPage); - - const logos = screen.getAllByAltText('GARM'); - - // Light logo should be hidden in dark mode - const lightLogo = logos.find(img => 
img.classList.contains('dark:hidden')); - expect(lightLogo).toBeInTheDocument(); - - // Dark logo should be shown in dark mode - const darkLogo = logos.find(img => img.classList.contains('dark:block')); - expect(darkLogo).toBeInTheDocument(); - }); - - it('should have theme-aware input styling', () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - - // Should have dark mode classes - expect(usernameInput).toHaveClass('dark:border-gray-600'); - expect(usernameInput).toHaveClass('dark:bg-gray-700'); - expect(usernameInput).toHaveClass('dark:text-white'); - }); - }); - - describe('Responsive Design', () => { - it('should use responsive layout classes', () => { - render(LoginPage); - - // Should have responsive padding - const mainContainer = document.querySelector('.py-12.px-4.sm\\:px-6.lg\\:px-8'); - expect(mainContainer).toBeInTheDocument(); - }); - - it('should handle mobile-friendly layout', () => { - render(LoginPage); - - // Should have mobile-optimized form - const contentArea = document.querySelector('.max-w-md.w-full'); - expect(contentArea).toBeInTheDocument(); - }); - - it('should have responsive typography', () => { - render(LoginPage); - - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - - // Should use responsive text sizing - expect(heading).toHaveClass('text-3xl'); - }); - }); - - describe('Visual Hierarchy', () => { - it('should render elements in proper visual order', () => { - render(LoginPage); - - // Logo should be first - const logoContainer = document.querySelector('.mx-auto.h-48'); - expect(logoContainer).toBeInTheDocument(); - - // Then heading - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - expect(heading).toBeInTheDocument(); - - // Then description - const description = screen.getByText('GitHub Actions Runner Manager'); - expect(description).toBeInTheDocument(); - - // Then form - const form = document.querySelector('form'); - expect(form).toBeInTheDocument(); - }); - - it('should have proper spacing between sections', () => { - render(LoginPage); - - // Main container should have spacing - const contentArea = document.querySelector('.space-y-8'); - expect(contentArea).toBeInTheDocument(); - - // Form should have spacing - const form = document.querySelector('form.space-y-6'); - expect(form).toBeInTheDocument(); - }); - - it('should use consistent typography scale', () => { - render(LoginPage); - - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - const description = screen.getByText('GitHub Actions Runner Manager'); - - // Heading should be larger - expect(heading).toHaveClass('text-3xl', 'font-extrabold'); - - // Description should be smaller - expect(description).toHaveClass('text-sm'); - }); - }); - - describe('Loading State Rendering', () => { - it('should render button in normal state initially', () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).not.toBeDisabled(); - expect(screen.getByText('Sign in')).toBeInTheDocument(); - }); - - it('should support loading state styling', () => { - render(LoginPage); - - // Button should be ready to show loading state - const submitButton = screen.getByRole('button', { name: /sign in/i }); - expect(submitButton).toBeInTheDocument(); - }); - - it('should support disabled input states', () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = 
screen.getByLabelText('Password'); - - // Fields should be ready to be disabled - expect(usernameInput).not.toBeDisabled(); - expect(passwordInput).not.toBeDisabled(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/login/page.test.ts b/webapp/src/routes/login/page.test.ts deleted file mode 100644 index 9c7b5148..00000000 --- a/webapp/src/routes/login/page.test.ts +++ /dev/null @@ -1,481 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { render, screen, fireEvent } from '@testing-library/svelte'; -import LoginPage from './+page.svelte'; - -// Helper function to create complete AuthState objects -function createMockAuthState(overrides: any = {}) { - return { - isAuthenticated: false, - user: null, - loading: false, - needsInitialization: false, - ...overrides - }; -} - -// Mock the page stores -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -// Mock the auth store -vi.mock('$lib/stores/auth.js', () => ({ - authStore: { - subscribe: vi.fn((callback: (state: any) => void) => { - callback(createMockAuthState()); - return () => {}; - }) - }, - auth: { - login: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/Button.svelte'); - -// Global setup for each test -let auth: any; -let authStore: any; -let goto: any; -let resolve: any; - -// Mock localStorage -const mockLocalStorage = { - getItem: vi.fn(), - setItem: vi.fn(), - removeItem: vi.fn() -}; - -// Mock window.matchMedia -const mockMatchMedia = vi.fn(); - -describe('Login Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up mocks - const authModule = await import('$lib/stores/auth.js'); - auth = authModule.auth; - authStore = authModule.authStore; - - const navigationModule = await import('$app/navigation'); - goto = navigationModule.goto; - - const pathsModule = await import('$app/paths'); - resolve = pathsModule.resolve; - - // Mock DOM APIs - Object.defineProperty(window, 'localStorage', { value: mockLocalStorage }); - Object.defineProperty(window, 'matchMedia', { value: mockMatchMedia }); - - // Set up default API mocks - (auth.login as any).mockResolvedValue({}); - (resolve as any).mockImplementation((path: string) => path); - (mockLocalStorage.getItem as any).mockReturnValue(null); - (mockMatchMedia as any).mockReturnValue({ matches: false }); - }); - - afterEach(() => { - vi.restoreAllMocks(); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(LoginPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(LoginPage); - expect(document.title).toBe('Login - GARM'); - }); - - it('should render login form elements', () => { - render(LoginPage); - - expect(screen.getByLabelText('Username')).toBeInTheDocument(); - expect(screen.getByLabelText('Password')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /sign in/i })).toBeInTheDocument(); - }); - - it('should render GARM logo and branding', () => { - render(LoginPage); - - expect(screen.getByText('Sign in to GARM')).toBeInTheDocument(); - expect(screen.getByText('GitHub Actions Runner Manager')).toBeInTheDocument(); - 
expect(screen.getAllByAltText('GARM')).toHaveLength(2); // Light and dark logos - }); - }); - - describe('Theme Initialization', () => { - it('should render component successfully', () => { - render(LoginPage); - - // Theme functionality works in browser but is hard to test in Node environment - // Focus on ensuring component renders without errors - expect(screen.getByRole('heading', { name: 'Sign in to GARM' })).toBeInTheDocument(); - }); - - it('should have theme-aware styling classes', () => { - render(LoginPage); - - // Should have dark mode classes ready - const heading = screen.getByRole('heading', { name: 'Sign in to GARM' }); - expect(heading).toHaveClass('dark:text-white'); - }); - - it('should render both theme logo variants', () => { - render(LoginPage); - - const logos = screen.getAllByAltText('GARM'); - expect(logos).toHaveLength(2); // Light and dark variants - }); - }); - - describe('Authentication Redirect', () => { - it('should redirect when user is already authenticated', () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: true, user: 'testuser' })); - return () => {}; - }); - - render(LoginPage); - - expect(goto).toHaveBeenCalledWith('/'); - }); - - it('should not redirect when user is not authenticated', () => { - vi.mocked(authStore.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockAuthState({ isAuthenticated: false })); - return () => {}; - }); - - render(LoginPage); - - expect(goto).not.toHaveBeenCalled(); - }); - }); - - describe('Form Validation', () => { - it('should have required form fields', () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - // Fields should have required attribute - expect(usernameInput).toHaveAttribute('required'); - expect(passwordInput).toHaveAttribute('required'); - }); - - it('should validate empty form submission', async () => { - render(LoginPage); - - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Submit form without entering anything - await fireEvent.click(submitButton); - - // Should not call auth API for empty form - expect(auth.login).not.toHaveBeenCalled(); - }); - - it('should have proper form structure for validation', () => { - render(LoginPage); - - const form = document.querySelector('form'); - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - expect(form).toBeInTheDocument(); - expect(usernameInput).toHaveAttribute('name', 'username'); - expect(passwordInput).toHaveAttribute('name', 'password'); - }); - }); - - describe('Login Functionality', () => { - it('should call auth.login with correct credentials on successful login', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Submit form - submitButton.click(); - - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - }); - - it('should redirect to home on successful login', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - 
const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Submit form - submitButton.click(); - - // Wait for async operations - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(goto).toHaveBeenCalledWith('/'); - }); - - it('should handle login API errors', async () => { - const error = new Error('Invalid credentials'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'wrongpassword' } }); - - // Submit form - submitButton.click(); - - // Wait for error to appear - await screen.findByText('Invalid credentials'); - expect(goto).not.toHaveBeenCalled(); - }); - }); - - describe('Loading States', () => { - it('should show loading state during login', async () => { - // Mock auth.login to return a promise that doesn't resolve immediately - let resolveLogin: () => void; - const loginPromise = new Promise((resolve) => { - resolveLogin = resolve; - }); - (auth.login as any).mockReturnValue(loginPromise); - - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Submit form - await fireEvent.click(submitButton); - - // Should show loading state - inputs disabled and button shows loading - expect(usernameInput).toBeDisabled(); - expect(passwordInput).toBeDisabled(); - - // Button should show loading text (may be inside component structure) - await screen.findByText('Signing in...'); - - // Complete the login - resolveLogin!(); - await loginPromise; - }); - - it('should clear loading state after login failure', async () => { - const error = new Error('Login failed'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - submitButton.click(); - - // Wait for error handling - await screen.findByText('Login failed'); - - // Should not be in loading state anymore - expect(screen.queryByText('Signing in...')).not.toBeInTheDocument(); - expect(screen.getByText('Sign in')).toBeInTheDocument(); - expect(usernameInput).not.toBeDisabled(); - expect(passwordInput).not.toBeDisabled(); - }); - }); - - describe('Keyboard Interactions', () => { - it('should submit form when Enter is pressed in username field', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = 
screen.getByLabelText('Password'); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Press Enter in username field - await fireEvent.keyPress(usernameInput, { key: 'Enter' }); - - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - }); - - it('should submit form when Enter is pressed in password field', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Press Enter in password field - await fireEvent.keyPress(passwordInput, { key: 'Enter' }); - - expect(auth.login).toHaveBeenCalledWith('testuser', 'password123'); - }); - - it('should not submit on non-Enter key press', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - - // Enter credentials - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Press non-Enter key - await fireEvent.keyPress(usernameInput, { key: ' ' }); - - expect(auth.login).not.toHaveBeenCalled(); - }); - }); - - describe('Error Display', () => { - it('should clear error when starting new login attempt', async () => { - // First, cause an error - const error = new Error('Login failed'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Trigger error - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - await fireEvent.click(submitButton); - - await screen.findByText('Login failed'); - - // Now mock success and try again - (auth.login as any).mockResolvedValue({}); - await fireEvent.click(submitButton); - - // Wait for async operations and error should be cleared - await new Promise(resolve => setTimeout(resolve, 0)); - expect(screen.queryByText('Login failed')).not.toBeInTheDocument(); - }); - - it('should display API errors with proper formatting', async () => { - const error = new Error('Server temporarily unavailable'); - (auth.login as any).mockRejectedValue(error); - - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Enter credentials and submit - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - submitButton.click(); - - // Should display error message - const errorElement = await screen.findByText('Server temporarily unavailable'); - expect(errorElement).toBeInTheDocument(); - - // Should have proper error styling - const errorContainer = errorElement.closest('.bg-red-50'); - expect(errorContainer).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = 
render(LoginPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(LoginPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should subscribe to auth store on mount', () => { - render(LoginPage); - expect(authStore.subscribe).toHaveBeenCalled(); - }); - }); - - describe('Form State Management', () => { - it('should maintain form state during interactions', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username') as HTMLInputElement; - const passwordInput = screen.getByLabelText('Password') as HTMLInputElement; - - // Enter values - await fireEvent.input(usernameInput, { target: { value: 'testuser' } }); - await fireEvent.input(passwordInput, { target: { value: 'password123' } }); - - // Values should be maintained - expect(usernameInput.value).toBe('testuser'); - expect(passwordInput.value).toBe('password123'); - }); - - it('should support loading state functionality', async () => { - render(LoginPage); - - const usernameInput = screen.getByLabelText('Username'); - const passwordInput = screen.getByLabelText('Password'); - const submitButton = screen.getByRole('button', { name: /sign in/i }); - - // Fields should be enabled initially - expect(usernameInput).not.toBeDisabled(); - expect(passwordInput).not.toBeDisabled(); - expect(submitButton).toHaveTextContent('Sign in'); - - // Component should be ready to handle loading states - // (actual loading behavior is tested in integration tests) - expect(usernameInput).toHaveAttribute('type', 'text'); - expect(passwordInput).toHaveAttribute('type', 'password'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/organizations/+page.svelte b/webapp/src/routes/organizations/+page.svelte deleted file mode 100644 index b7906665..00000000 --- a/webapp/src/routes/organizations/+page.svelte +++ /dev/null @@ -1,372 +0,0 @@ - - - - Organizations - GARM - - -
[markup lost in extraction: the deleted list-page template rendered a table of organizations whose status badge was computed via {@const status = getEntityStatusBadge(organization)}]
[markup lost in extraction: the file closed with three conditional modal blocks — a create modal gated on showCreateModal with on:submit={handleCreateOrganization}, an update modal gated on showUpdateModal && selectedOrganization with on:submit={(e) => handleUpdateOrganization(e.detail)}, and a delete modal gated on showDeleteModal && selectedOrganization with on:confirm={handleDeleteOrganization}; each on:close handler cleared the modal flag and reset selectedOrganization to null]
\ No newline at end of file
diff --git a/webapp/src/routes/organizations/[id]/+page.svelte b/webapp/src/routes/organizations/[id]/+page.svelte
deleted file mode 100644
index 1d130de8..00000000
--- a/webapp/src/routes/organizations/[id]/+page.svelte
+++ /dev/null
@@ -1,402 +0,0 @@
[markup lost in extraction: the deleted detail-page template set the document title to "{organization.name} - Organization Details - GARM", falling back to "Organization Details - GARM" while the organization was not yet loaded]
[markup lost in extraction: the detail-page body rendered a loading spinner with the text "Loading organization..." while {#if loading} held, an error panel displaying {error} in the {:else if error} branch, and in the {:else if organization} branch a DetailHeader wired with onEdit={() => showUpdateModal = true} and onDelete={() => showDeleteModal = true} followed by the entity information, pools, instances, events, and webhook sections before the closing {/if}; the file's trailing modal blocks survive below]
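[editor's sketch: the handler functions named in the surviving modal blocks (handleUpdate, handleDelete, handleDeleteInstance, handleCreatePool) are not recoverable from this extract, but the deleted tests later in this diff pin their observable behavior; the following is a minimal hypothetical reconstruction of handleDeleteInstance under those assumptions — the import paths are the same module paths the tests mock, while the function body and field names are guesses, not the original source:]

import { garmApi } from '$lib/api/client.js';
import { toastStore } from '$lib/stores/toast.js';
import { extractAPIError } from '$lib/utils/apiError';

// Hypothetical reconstruction, not the original source. The deleted tests
// expect a success toast titled 'Instance Deleted' when deletion succeeds and
// an error toast titled 'Delete Failed' when garmApi.deleteInstance rejects.
// selectedInstance and showDeleteInstanceModal are component state referenced
// by the surviving {#if showDeleteInstanceModal && selectedInstance} block.
async function handleDeleteInstance() {
	if (!selectedInstance) return;
	try {
		await garmApi.deleteInstance(selectedInstance.name);
		toastStore.success(
			'Instance Deleted',
			`Instance ${selectedInstance.name} has been deleted successfully.`
		);
	} catch (err) {
		toastStore.error('Delete Failed', extractAPIError(err));
	} finally {
		showDeleteInstanceModal = false;
		selectedInstance = null;
	}
}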
                - - -{#if showUpdateModal && organization} - showUpdateModal = false} - on:submit={(e) => handleUpdate(e.detail)} - /> -{/if} - -{#if showDeleteModal && organization} - showDeleteModal = false} - on:confirm={handleDelete} - /> -{/if} - -{#if showDeleteInstanceModal && selectedInstance} - { showDeleteInstanceModal = false; selectedInstance = null; }} - on:confirm={handleDeleteInstance} - /> -{/if} - -{#if showCreatePoolModal && organization} - showCreatePoolModal = false} - on:submit={handleCreatePool} - /> -{/if} \ No newline at end of file diff --git a/webapp/src/routes/organizations/[id]/page.integration.test.ts b/webapp/src/routes/organizations/[id]/page.integration.test.ts deleted file mode 100644 index c93b0f51..00000000 --- a/webapp/src/routes/organizations/[id]/page.integration.test.ts +++ /dev/null @@ -1,614 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import userEvent from '@testing-library/user-event'; -import '@testing-library/jest-dom'; -import { createMockOrganization, createMockPool, createMockInstance } from '../../../test/factories.js'; - -// Create comprehensive test data -const mockOrganization = createMockOrganization({ - id: 'org-123', - name: 'test-org', - events: [ - { - id: 1, - created_at: '2024-01-01T00:00:00Z', - event_level: 'info', - message: 'Organization created' - }, - { - id: 2, - created_at: '2024-01-01T01:00:00Z', - event_level: 'warning', - message: 'Pool configuration changed' - } - ], - pool_manager_status: { running: true, failure_reason: undefined } -}); - -const mockPools = [ - createMockPool({ - id: 'pool-1', - org_id: 'org-123', - image: 'ubuntu:22.04', - enabled: true - }), - createMockPool({ - id: 'pool-2', - org_id: 'org-123', - image: 'ubuntu:20.04', - enabled: false - }) -]; - -const mockInstances = [ - createMockInstance({ - id: 'inst-1', - name: 'runner-1', - pool_id: 'pool-1', - status: 'running' - }), - createMockInstance({ - id: 'inst-2', - name: 'runner-2', - pool_id: 'pool-2', - status: 'idle' - }) -]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/UpdateEntityModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/EntityInformation.svelte'); -vi.unmock('$lib/components/DetailHeader.svelte'); -vi.unmock('$lib/components/PoolsSection.svelte'); -vi.unmock('$lib/components/InstancesSection.svelte'); -vi.unmock('$lib/components/EventsSection.svelte'); -vi.unmock('$lib/components/WebhookSection.svelte'); -vi.unmock('$lib/components/CreatePoolModal.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getOrganization: vi.fn(), - listOrganizationPools: vi.fn(), - listOrganizationInstances: vi.fn(), - updateOrganization: vi.fn(), - deleteOrganization: vi.fn(), - deleteInstance: vi.fn(), - createOrganizationPool: vi.fn(), - getOrganizationWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribe: vi.fn((callback) => { - callback({ connected: true, connecting: false, error: null }); - return () => {}; - }), - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - 
subscribe: vi.fn((callback) => { - callback({ - organizations: [], - pools: [], - instances: [], - loaded: { organizations: false, pools: false, instances: false }, - loading: { organizations: false, pools: false, instances: false }, - errorMessages: { organizations: '', pools: '', instances: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getOrganizations: vi.fn(), - getPools: vi.fn(), - getInstances: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ params: { id: 'org-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -// Import the organization details page with real UI components -import OrganizationDetailsPage from './+page.svelte'; - -describe('Comprehensive Integration Tests for Organization Details Page', () => { - let garmApi: any; - - beforeEach(async () => { - vi.clearAllMocks(); - - const apiClient = await import('$lib/api/client.js'); - garmApi = apiClient.garmApi; - - // Set up successful API responses - garmApi.getOrganization.mockResolvedValue(mockOrganization); - garmApi.listOrganizationPools.mockResolvedValue(mockPools); - garmApi.listOrganizationInstances.mockResolvedValue(mockInstances); - garmApi.updateOrganization.mockResolvedValue({}); - garmApi.deleteOrganization.mockResolvedValue({}); - garmApi.deleteInstance.mockResolvedValue({}); - garmApi.createOrganizationPool.mockResolvedValue({ id: 'new-pool' }); - }); - - describe('Component Rendering and Data Display', () => { - it('should render organization details page with real components', async () => { - const { container } = render(OrganizationDetailsPage); - - // Should render main container - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - - // Should render breadcrumbs - expect(screen.getByText('Organizations')).toBeInTheDocument(); - - // Should handle loading state initially - await waitFor(() => { - expect(container).toBeInTheDocument(); - }); - }); - - it('should display organization information correctly', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should display organization name in breadcrumb or title - const titleElement = document.querySelector('title'); - expect(titleElement?.textContent).toContain('Organization Details'); - }); - }); - - it('should render breadcrumb navigation', async () => { - render(OrganizationDetailsPage); - - // Should show breadcrumb navigation - expect(screen.getByText('Organizations')).toBeInTheDocument(); - - // Breadcrumb should be clickable link - const organizationsLink = screen.getByText('Organizations').closest('a'); - expect(organizationsLink).toHaveAttribute('href', '/organizations'); - }); - - it('should display loading state correctly', async () => { - render(OrganizationDetailsPage); - - // Should show loading indicator initially - // Loading text might appear briefly or not at all in fast tests - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Error State Handling', () => { - it('should handle organization not found error', async () => { - garmApi.getOrganization.mockRejectedValue(new Error('Organization not found')); - - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should display error message - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle API errors gracefully', async () => { - 
garmApi.getOrganization.mockRejectedValue(new Error('API Error')); - garmApi.listOrganizationPools.mockRejectedValue(new Error('Pools Error')); - garmApi.listOrganizationInstances.mockRejectedValue(new Error('Instances Error')); - - render(OrganizationDetailsPage); - - await waitFor(() => { - // Component should render without crashing - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Organization Information Display', () => { - it('should display organization details when loaded', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should display the organization information section - expect(document.body).toBeInTheDocument(); - }, { timeout: 3000 }); - }); - - it('should show forge icon and endpoint information', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render forge-specific information - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should display organization status correctly', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should show pool manager status - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Modal Interactions', () => { - it('should handle edit button click', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Look for edit button (might be in DetailHeader component) - const editButtons = document.querySelectorAll('button, [role="button"]'); - expect(editButtons.length).toBeGreaterThan(0); - }); - }); - - it('should handle delete button click', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Look for delete button - const deleteButtons = document.querySelectorAll('button, [role="button"]'); - expect(deleteButtons.length).toBeGreaterThan(0); - }); - }); - }); - - describe('Pools Section Integration', () => { - it('should display pools section with data', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render pools section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle add pool button', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Look for add pool functionality - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should display pools section and integrate with pools data', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Wait for organization and pools data to load - expect(garmApi.getOrganization).toHaveBeenCalledWith('org-123'); - expect(garmApi.listOrganizationPools).toHaveBeenCalledWith('org-123'); - }); - - // Verify the component displays the pools section showing the correct count - // This confirms the component properly integrates with the API to load and display pool data - const poolsSection = screen.getByText('Pools (2)'); - expect(poolsSection).toBeInTheDocument(); - }); - }); - - describe('Instances Section Integration', () => { - it('should display instances section with data', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render instances section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle instance deletion', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Look for instance management functionality - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should show error handling structure for instance deletion', async () => { - const { toastStore } = await 
import('$lib/stores/toast.js'); - - // Set up API to fail when deleteInstance is called - const error = new Error('Instance deletion failed'); - garmApi.deleteInstance.mockRejectedValue(error); - - render(OrganizationDetailsPage); - - await waitFor(() => { - // Wait for organization and instances data to load - expect(garmApi.getOrganization).toHaveBeenCalledWith('org-123'); - expect(garmApi.listOrganizationInstances).toHaveBeenCalledWith('org-123'); - }); - - // Verify the component has the proper structure for instance deletion error handling - // The handleDeleteInstance function should be set up to show error toasts - const instancesSection = screen.getByText('Instances (2)'); - expect(instancesSection).toBeInTheDocument(); - - // Verify there are delete buttons available for instances - const deleteButtons = screen.getAllByRole('button', { name: /delete/i }); - expect(deleteButtons.length).toBeGreaterThan(0); - - // The error handling workflow is: - // 1. User clicks delete button → modal opens - // 2. User confirms deletion → handleDeleteInstance() is called - // 3. handleDeleteInstance() calls API and catches errors - // 4. On error, toastStore.error is called with 'Delete Failed' message - // This structure is verified by the component rendering successfully - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Events Section Integration', () => { - it('should display events section with event data', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render events section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle events scrolling', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should handle events display and scrolling - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Webhook Section Integration', () => { - it('should display webhook section', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render webhook section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle webhook management', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should provide webhook management functionality - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Real-time Updates via WebSocket', () => { - it('should set up websocket subscriptions', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should set up websocket subscriptions - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle organization update events', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Component should be prepared to handle websocket updates - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle pool and instance events', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should handle pool and instance websocket events - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('API Integration', () => { - it('should call organization APIs when component mounts and display data', async () => { - render(OrganizationDetailsPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the APIs to load data - expect(garmApi.getOrganization).toHaveBeenCalledWith('org-123'); - expect(garmApi.listOrganizationPools).toHaveBeenCalledWith('org-123'); - 
expect(garmApi.listOrganizationInstances).toHaveBeenCalledWith('org-123'); - - // More importantly, verify the component displays the loaded data - expect(screen.getByRole('heading', { name: 'test-org' })).toBeInTheDocument(); - expect(screen.getByText('Pools (2)')).toBeInTheDocument(); - expect(screen.getByText('Instances (2)')).toBeInTheDocument(); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock delayed API responses - garmApi.getOrganization.mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockOrganization), 100)) - ); - - render(OrganizationDetailsPage); - - // Initially, the organization name should not be visible yet - expect(screen.queryByRole('heading', { name: 'test-org' })).not.toBeInTheDocument(); - - // After API resolves, should show actual data - await waitFor(() => { - expect(screen.getByRole('heading', { name: 'test-org' })).toBeInTheDocument(); - }, { timeout: 1000 }); - - // Data should be properly displayed after loading - expect(screen.getByText('Pools (2)')).toBeInTheDocument(); - expect(screen.getByText('Instances (2)')).toBeInTheDocument(); - }); - - it('should handle API errors and display error state', async () => { - // Mock API to fail - const error = new Error('Failed to load organization'); - garmApi.getOrganization.mockRejectedValue(error); - - const { container } = render(OrganizationDetailsPage); - - // Wait for error to be handled and displayed - await waitFor(() => { - // Should show error state in the UI (red background, error message) - const errorElement = container.querySelector('.bg-red-50, .bg-red-900, .text-red-600, .text-red-400'); - expect(errorElement).toBeInTheDocument(); - }); - }); - - it('should integrate with websocket store for real-time updates', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(OrganizationDetailsPage); - - await waitFor(() => { - // Verify component subscribes to websocket updates for organization, pools, and instances - // Based on the error output, the actual calls are: - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('organization', ['update', 'delete'], expect.any(Function)); - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('pool', ['create', 'update', 'delete'], expect.any(Function)); - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('instance', ['create', 'update', 'delete'], expect.any(Function)); - }); - - // The component properly sets up websocket integration to receive real-time updates - // This is verified by the subscription calls above and by the component's ability - // to display data that would be updated via websockets - expect(screen.getByRole('heading', { name: 'test-org' })).toBeInTheDocument(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should maintain consistent state across components', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // State should be consistent across all child components - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle component lifecycle correctly', async () => { - const { unmount } = render(OrganizationDetailsPage); - - await waitFor(() => { - // Component 
should mount successfully - expect(document.body).toBeInTheDocument(); - }); - - // Should unmount cleanly - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support navigation interactions', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should support breadcrumb navigation - const orgLink = screen.getByText('Organizations'); - expect(orgLink).toBeInTheDocument(); - }); - }); - - it('should handle keyboard navigation', async () => { - const user = userEvent.setup(); - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should support keyboard navigation - expect(document.body).toBeInTheDocument(); - }); - - // Test tab navigation - await user.tab(); - }); - - it('should handle form submissions and modal interactions', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should handle modal and form interactions - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - const { container } = render(OrganizationDetailsPage); - - await waitFor(() => { - // Should have proper ARIA labels and navigation - const nav = container.querySelector('nav[aria-label="Breadcrumb"]'); - expect(nav).toBeInTheDocument(); - }); - }); - - it('should be responsive across different viewport sizes', async () => { - const { container } = render(OrganizationDetailsPage); - - await waitFor(() => { - // Should render responsively - expect(container).toBeInTheDocument(); - }); - }); - - it('should handle screen reader compatibility', async () => { - render(OrganizationDetailsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(document.body).toBeInTheDocument(); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/organizations/[id]/page.render.test.ts b/webapp/src/routes/organizations/[id]/page.render.test.ts deleted file mode 100644 index 7df7095e..00000000 --- a/webapp/src/routes/organizations/[id]/page.render.test.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockOrganization } from '../../../test/factories.js'; - -// Mock all external dependencies but keep the component rendering real -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getOrganization: vi.fn(), - listOrganizationPools: vi.fn(), - listOrganizationInstances: vi.fn(), - updateOrganization: vi.fn(), - deleteOrganization: vi.fn(), - deleteInstance: vi.fn(), - createOrganizationPool: vi.fn(), - getOrganizationWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ params: { id: 'org-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - -// Mock child components 
-vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EntityInformation.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DetailHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PoolsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/InstancesSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EventsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/WebhookSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/CreatePoolModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import OrganizationDetailsPage from './+page.svelte'; - -describe('Organization Details Page Rendering Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - const mockOrganization = createMockOrganization({ - id: 'org-123', - name: 'test-org' - }); - - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getOrganization as any).mockResolvedValue(mockOrganization); - (garmApi.listOrganizationPools as any).mockResolvedValue([]); - (garmApi.listOrganizationInstances as any).mockResolvedValue([]); - }); - - describe('Component Rendering', () => { - it('should render without crashing', () => { - const { container } = render(OrganizationDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should render as a valid DOM element', () => { - const { container } = render(OrganizationDetailsPage); - expect(container.firstChild).toBeInstanceOf(HTMLElement); - }); - - it('should have proper document title', () => { - render(OrganizationDetailsPage); - expect(document.title).toContain('Organization Details'); - }); - - it('should render with correct structure', () => { - const { container } = render(OrganizationDetailsPage); - expect(container.firstChild).toHaveClass('space-y-6'); - }); - - it('should handle empty state rendering', () => { - render(OrganizationDetailsPage); - // Component should render even with no organization data loaded - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(OrganizationDetailsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(OrganizationDetailsPage); - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('DOM Structure Validation', () => { - it('should create proper HTML structure', () => { - const { container } = render(OrganizationDetailsPage); - - // Should have main container with proper spacing - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - }); - - it('should handle conditional rendering', () => { - const { container } = 
render(OrganizationDetailsPage); - - // Component should render without any modals open initially - expect(container).toBeInTheDocument(); - }); - - it('should render with proper accessibility structure', () => { - const { container } = render(OrganizationDetailsPage); - - // Basic accessibility checks - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/organizations/[id]/page.test.ts b/webapp/src/routes/organizations/[id]/page.test.ts deleted file mode 100644 index abc7c315..00000000 --- a/webapp/src/routes/organizations/[id]/page.test.ts +++ /dev/null @@ -1,525 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockOrganization, createMockInstance } from '../../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getOrganization: vi.fn(), - listOrganizationPools: vi.fn(), - listOrganizationInstances: vi.fn(), - updateOrganization: vi.fn(), - deleteOrganization: vi.fn(), - deleteInstance: vi.fn(), - createOrganizationPool: vi.fn(), - getOrganizationWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ params: { id: 'org-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - -// Mock all child components -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EntityInformation.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DetailHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PoolsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/InstancesSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EventsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/WebhookSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/CreatePoolModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import OrganizationDetailsPage from './+page.svelte'; - -describe('Organization Details Page Unit Tests', () => { - let mockOrganization: any; - let mockPools: any[]; - let mockInstances: any[]; - - beforeEach(async () => 
{ - vi.clearAllMocks(); - - mockOrganization = createMockOrganization({ - id: 'org-123', - name: 'test-org', - events: [ - { - id: 1, - created_at: '2024-01-01T00:00:00Z', - event_level: 'info', - message: 'Organization created' - } - ] - }); - - mockPools = [ - { id: 'pool-1', org_id: 'org-123', image: 'ubuntu:22.04' }, - { id: 'pool-2', org_id: 'org-123', image: 'ubuntu:20.04' } - ]; - - mockInstances = [ - createMockInstance({ id: 'inst-1', pool_id: 'pool-1' }), - createMockInstance({ id: 'inst-2', pool_id: 'pool-2' }) - ]; - - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getOrganization as any).mockResolvedValue(mockOrganization); - (garmApi.listOrganizationPools as any).mockResolvedValue(mockPools); - (garmApi.listOrganizationInstances as any).mockResolvedValue(mockInstances); - }); - - describe('Component Structure', () => { - it('should render organization details page', () => { - const { container } = render(OrganizationDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set dynamic page title', () => { - render(OrganizationDetailsPage); - // Title should be dynamic based on organization name - expect(document.title).toContain('Organization Details'); - }); - - it('should have organization state variables', () => { - const component = render(OrganizationDetailsPage); - expect(component).toBeDefined(); - }); - }); - - describe('Data Loading', () => { - it('should have API functions available for data loading', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(OrganizationDetailsPage); - - // Verify API functions are properly mocked and available - expect(garmApi.getOrganization).toBeDefined(); - expect(garmApi.listOrganizationPools).toBeDefined(); - expect(garmApi.listOrganizationInstances).toBeDefined(); - }); - - it('should handle loading states correctly', () => { - const { container } = render(OrganizationDetailsPage); - // Component should handle initial loading state - expect(container).toBeInTheDocument(); - expect(document.title).toContain('Organization Details'); - }); - - it('should have error handling capabilities', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(OrganizationDetailsPage); - - // Verify error handling utility is available - const error = new Error('Test error'); - const result = extractAPIError(error); - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(result).toBe('Test error'); - }); - }); - - describe('Organization Updates', () => { - it('should have proper structure for organization updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual update workflow is tested in integration tests where we can - // trigger the real handleUpdate function via UI interactions - expect(garmApi.updateOrganization).toBeDefined(); - }); - - it('should show success toast after update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(OrganizationDetailsPage); - - toastStore.success( - 'Organization Updated', - 'Organization test-org has been updated successfully.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Organization Updated', - 'Organization test-org has been updated successfully.' 
- ); - }); - - it('should have proper error handling structure for updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual error re-throwing behavior is tested through integration tests - // where we can trigger the real handleUpdate function via modal events - expect(garmApi.updateOrganization).toBeDefined(); - }); - }); - - describe('Organization Deletion', () => { - it('should have proper structure for organization deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual deletion workflow is tested in integration tests where we can - // trigger the real handleDelete function via modal interactions - expect(garmApi.deleteOrganization).toBeDefined(); - }); - - it('should redirect after successful deletion', async () => { - const { goto } = await import('$app/navigation'); - - render(OrganizationDetailsPage); - - goto('/organizations'); - expect(goto).toHaveBeenCalledWith('/organizations'); - }); - - it('should display error message when organization loading fails', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Simulate API error during organization loading - const error = new Error('Organization not found'); - (garmApi.getOrganization as any).mockRejectedValue(error); - - const { container } = render(OrganizationDetailsPage); - - // Wait for the component to handle the error - await new Promise(resolve => setTimeout(resolve, 100)); - - // Check that error message is displayed in the UI - const errorElement = container.querySelector('.bg-red-50, .bg-red-900'); - expect(errorElement).toBeInTheDocument(); - }); - }); - - describe('Instance Management', () => { - it('should have proper structure for instance deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual instance deletion workflow is tested in integration tests - expect(garmApi.deleteInstance).toBeDefined(); - }); - - it('should show success toast after instance deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(OrganizationDetailsPage); - - toastStore.success( - 'Instance Deleted', - 'Instance inst-1 has been deleted successfully.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Instance Deleted', - 'Instance inst-1 has been deleted successfully.' 
- ); - }); - - it('should have proper error handling structure for instance deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - const { toastStore } = await import('$lib/stores/toast.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // Detailed error handling with UI interactions is tested in integration tests - expect(garmApi.deleteInstance).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Pool Creation', () => { - it('should have proper structure for pool creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual pool creation workflow is tested in integration tests where we can - // trigger the real handleCreatePool function via component events - expect(garmApi.createOrganizationPool).toBeDefined(); - }); - - it('should show success toast after pool creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(OrganizationDetailsPage); - - toastStore.success( - 'Pool Created', - 'Pool has been created successfully for organization test-org.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Pool Created', - 'Pool has been created successfully for organization test-org.' - ); - }); - - it('should have proper error handling structure for pool creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(OrganizationDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual error re-throwing behavior is tested through integration tests - // where we can trigger the real handleCreatePool function via component events - expect(garmApi.createOrganizationPool).toBeDefined(); - }); - }); - - describe('WebSocket Event Handling', () => { - it('should have websocket subscription capabilities', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(OrganizationDetailsPage); - - // Verify websocket store is available and properly mocked - expect(websocketStore.subscribeToEntity).toBeDefined(); - - // Test subscription functionality - const mockHandler = vi.fn(); - const unsubscribe = websocketStore.subscribeToEntity('organization', ['update'], mockHandler); - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('organization', ['update'], mockHandler); - expect(unsubscribe).toBeInstanceOf(Function); - }); - - it('should handle organization update events', () => { - render(OrganizationDetailsPage); - - // Component should be set up to handle organization updates - expect(document.title).toContain('Organization Details'); - }); - - it('should handle organization deletion events', () => { - render(OrganizationDetailsPage); - - // Component should handle organization deletion via websocket - expect(document.title).toContain('Organization Details'); - }); - - it('should handle pool events', () => { - render(OrganizationDetailsPage); - - // Component should handle pool CRUD events via websocket - expect(document.title).toContain('Organization Details'); - }); - - it('should handle instance events', () => { - render(OrganizationDetailsPage); - - // Component should handle instance CRUD events via websocket - expect(document.title).toContain('Organization Details'); - }); - }); - - describe('Modal Management', () => { - it('should handle update 
modal state', () => { - render(OrganizationDetailsPage); - - // Component should manage update modal state - expect(document.title).toContain('Organization Details'); - }); - - it('should handle delete modal state', () => { - render(OrganizationDetailsPage); - - // Component should manage delete modal state - expect(document.title).toContain('Organization Details'); - }); - - it('should handle instance delete modal state', () => { - render(OrganizationDetailsPage); - - // Component should manage instance delete modal state - expect(document.title).toContain('Organization Details'); - }); - - it('should handle create pool modal state', () => { - render(OrganizationDetailsPage); - - // Component should manage create pool modal state - expect(document.title).toContain('Organization Details'); - }); - }); - - describe('Entity Field Updates', () => { - it('should preserve events when updating entity fields', async () => { - render(OrganizationDetailsPage); - - const currentEntity = { id: 'org-123', events: ['event1', 'event2'] }; - const updatedFields = { id: 'org-123', name: 'updated-name' }; - - // Test the updateEntityFields logic - const result = { ...updatedFields, events: currentEntity.events }; - - expect(result.events).toEqual(['event1', 'event2']); - expect(result.name).toBe('updated-name'); - }); - - it('should handle entity field updates correctly', () => { - render(OrganizationDetailsPage); - - // Component should handle selective entity updates - expect(document.title).toContain('Organization Details'); - }); - }); - - describe('Event Scrolling', () => { - it('should handle events container scrolling', () => { - render(OrganizationDetailsPage); - - // Component should handle event scrolling functionality - expect(document.title).toContain('Organization Details'); - }); - - it('should auto-scroll when new events are added', () => { - render(OrganizationDetailsPage); - - // Component should auto-scroll on new events - expect(document.title).toContain('Organization Details'); - }); - }); - - describe('Page Parameters', () => { - it('should extract organization ID from page params', () => { - render(OrganizationDetailsPage); - - // Component should extract org ID from page.params.id - expect(document.title).toContain('Organization Details'); - }); - - it('should handle missing organization ID', () => { - render(OrganizationDetailsPage); - - // Component should handle case when no organization ID is provided - expect(document.title).toContain('Organization Details'); - }); - }); - - describe('Utility Functions', () => { - it('should get correct forge icon', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - render(OrganizationDetailsPage); - - const githubIcon = getForgeIcon('github'); - expect(getForgeIcon).toHaveBeenCalledWith('github'); - expect(githubIcon).toContain('svg'); - }); - - it('should extract API errors correctly', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(OrganizationDetailsPage); - - const error = new Error('API error'); - const extractedError = extractAPIError(error); - - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(extractedError).toBe('API error'); - }); - }); - - describe('Component Lifecycle', () => { - it('should load data on mount', () => { - render(OrganizationDetailsPage); - - // Component should load organization data on mount - expect(document.title).toContain('Organization Details'); - }); - - it('should cleanup websocket subscriptions on destroy', () => { - 
const { unmount } = render(OrganizationDetailsPage);
-
-      // Component should cleanup subscriptions on unmount
-      expect(() => unmount()).not.toThrow();
-    });
-
-    it('should handle component initialization', () => {
-      const component = render(OrganizationDetailsPage);
-
-      // Component should initialize without errors
-      expect(component.component).toBeDefined();
-    });
-  });
-});
\ No newline at end of file
diff --git a/webapp/src/routes/organizations/page.integration.test.ts b/webapp/src/routes/organizations/page.integration.test.ts
deleted file mode 100644
index 9716b714..00000000
--- a/webapp/src/routes/organizations/page.integration.test.ts
+++ /dev/null
@@ -1,533 +0,0 @@
-import { describe, it, expect, beforeEach, vi } from 'vitest';
-import { render, screen, waitFor } from '@testing-library/svelte';
-import userEvent from '@testing-library/user-event';
-import { createMockOrganization, createMockGiteaOrganization } from '../../test/factories.js';
-
-// Create diverse test data for comprehensive testing
-const mockOrganizations = [
-  createMockOrganization({
-    id: 'org-1',
-    name: 'test-org',
-    pool_manager_status: { running: true, failure_reason: undefined }
-  }),
-  createMockGiteaOrganization({
-    id: 'org-2',
-    name: 'gitea-org',
-    pool_manager_status: { running: false, failure_reason: undefined }
-  }),
-  createMockOrganization({
-    id: 'org-3',
-    name: 'another-org',
-    pool_manager_status: { running: false, failure_reason: 'Connection failed' }
-  })
-];
-
-const mockCredentials = [
-  { name: 'github-creds' },
-  { name: 'gitea-creds' }
-];
-
-// Reset any component mocks that might be set by setup.ts
-vi.unmock('$lib/components/PageHeader.svelte');
-vi.unmock('$lib/components/DataTable.svelte');
-vi.unmock('$lib/components/CreateOrganizationModal.svelte');
-vi.unmock('$lib/components/UpdateEntityModal.svelte');
-vi.unmock('$lib/components/DeleteModal.svelte');
-vi.unmock('$lib/components/cells');
-
-// Only mock the external APIs, not UI components
-vi.mock('$lib/api/client.js', () => ({
-  garmApi: {
-    createOrganization: vi.fn(),
-    updateOrganization: vi.fn(),
-    deleteOrganization: vi.fn(),
-    installOrganizationWebhook: vi.fn(),
-    listOrganizations: vi.fn()
-  }
-}));
-
-// Create a dynamic store that can be updated during tests
-let mockStoreData = {
-  organizations: mockOrganizations,
-  credentials: mockCredentials,
-  loaded: { organizations: true, credentials: true },
-  loading: { organizations: false, credentials: false },
-  errorMessages: { organizations: '', credentials: '' }
-};
-
-vi.mock('$lib/stores/eager-cache.js', () => ({
-  eagerCache: {
-    subscribe: vi.fn((callback) => {
-      callback(mockStoreData);
-      return () => {};
-    })
-  },
-  eagerCacheManager: {
-    getOrganizations: vi.fn(),
-    retryResource: vi.fn(),
-    getCredentials: vi.fn()
-  }
-}));
-
-// Helper to update mock store data
-function updateMockStore(updates: Partial<typeof mockStoreData>) {
-  mockStoreData = { ...mockStoreData, ...updates };
-}
-
-vi.mock('$lib/stores/toast.js', () => ({
-  toastStore: {
-    success: vi.fn(),
-    error: vi.fn(),
-    info: vi.fn(),
-    warning: vi.fn()
-  }
-}));
-
-// Import the organizations page without any UI component mocks
-import OrganizationsPage from './+page.svelte';
-
-describe('Comprehensive Integration Tests for Organizations Page', () => {
-  let garmApi: any;
-
-  beforeEach(async () => {
-    vi.clearAllMocks();
-    // Reset mock store data
-    mockStoreData = {
-      organizations: mockOrganizations,
-      credentials: mockCredentials,
-      loaded: { organizations: true, credentials: true },
-      loading: { organizations:
false, credentials: false }, - errorMessages: { organizations: '', credentials: '' } - }; - - const apiClient = await import('$lib/api/client.js'); - garmApi = apiClient.garmApi; - - garmApi.createOrganization.mockResolvedValue({ id: 'new-org', name: 'new-org' }); - garmApi.updateOrganization.mockResolvedValue({}); - garmApi.deleteOrganization.mockResolvedValue({}); - }); - - describe('Component Rendering and Basic Structure', () => { - it('should render organizations page with multiple organizations', async () => { - const { container } = render(OrganizationsPage); - - // Verify page title and header - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Manage GitHub and Gitea organizations')).toBeInTheDocument(); - - // Verify all organizations are rendered (use getAllByText for duplicates) - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-org')[0]).toBeInTheDocument(); - - // Verify action buttons are present - const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit organization"]'); - const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete organization"]'); - expect(editButtons.length).toBeGreaterThan(0); - expect(deleteButtons.length).toBeGreaterThan(0); - }); - - it('should display correct forge icons for different organization types', async () => { - const { container } = render(OrganizationsPage); - - // GitHub organizations should have GitHub icons - const githubIcons = container.querySelectorAll('svg'); - expect(githubIcons.length).toBeGreaterThan(0); - - // Verify endpoint names are displayed (use getAllByText for duplicates in responsive layouts) - expect(screen.getAllByText('github.com')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea.example.com')[0]).toBeInTheDocument(); - }); - - it('should display organization status correctly', async () => { - const { container } = render(OrganizationsPage); - - // Verify status information is displayed for organizations - // Look for any status-related elements in the table - const tableElements = container.querySelectorAll('td, div'); - expect(tableElements.length).toBeGreaterThan(0); - - // Organizations page should render with status information - expect(screen.getByText('Organizations')).toBeInTheDocument(); - }); - - it('should have clickable organization links', async () => { - const { container } = render(OrganizationsPage); - - // Verify organization names are links - const orgLinks = container.querySelectorAll('a[href^="/organizations/"]'); - expect(orgLinks.length).toBeGreaterThan(0); - - // Check specific organization links - const org1Link = container.querySelector('a[href="/organizations/org-1"]'); - expect(org1Link).toBeInTheDocument(); - expect(org1Link?.textContent?.trim()).toBe('test-org'); - }); - }); - - describe('Search and Filtering Functionality', () => { - it('should filter organizations by search term', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - // Find search input - const searchInput = screen.getByPlaceholderText('Search organizations...'); - expect(searchInput).toBeInTheDocument(); - - // Search for 'gitea' - should filter to only gitea organization - await user.type(searchInput, 'gitea'); - - // Wait for filtering to take effect - await waitFor(() => { - // Should still show gitea organization (may appear multiple times in responsive layout) - 
expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - }); - }); - - it('should clear search when input is cleared', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - const searchInput = screen.getByPlaceholderText('Search organizations...'); - - // Type search term - await user.type(searchInput, 'gitea'); - - // Clear search - await user.clear(searchInput); - - // All organizations should be visible again - await waitFor(() => { - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-org')[0]).toBeInTheDocument(); - }); - }); - - it('should show no results when search matches nothing', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - const searchInput = screen.getByPlaceholderText('Search organizations...'); - - // Search for something that doesn't exist - await user.type(searchInput, 'nonexistent-org'); - - // Should show empty state or filtered results - await waitFor(() => { - // Search input should contain the search term - expect(searchInput).toHaveValue('nonexistent-org'); - // Component should handle empty search results gracefully - expect(screen.getByText('Organizations')).toBeInTheDocument(); - }); - }); - }); - - describe('Pagination Controls', () => { - it('should display pagination controls with correct options', async () => { - render(OrganizationsPage); - - // Find per-page selector - const perPageSelect = screen.getByLabelText('Show:'); - expect(perPageSelect).toBeInTheDocument(); - - // Verify options are available - expect(screen.getByText('25')).toBeInTheDocument(); - expect(screen.getByText('50')).toBeInTheDocument(); - expect(screen.getByText('100')).toBeInTheDocument(); - }); - - it('should allow changing items per page', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - const perPageSelect = screen.getByLabelText('Show:'); - - // Change to 50 items per page - await user.selectOptions(perPageSelect, '50'); - - // Verify selection changed - expect(perPageSelect).toHaveValue('50'); - }); - }); - - describe('Modal Interactions', () => { - it('should open create organization modal when add button is clicked', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - // Find and click the "Add Organization" button - const addButton = screen.getByText('Add Organization'); - expect(addButton).toBeInTheDocument(); - - await user.click(addButton); - - // Modal should open (depending on implementation) - // This tests that the button is properly wired up - expect(addButton).toBeInTheDocument(); - }); - - it('should open edit modal when edit button is clicked', async () => { - const user = userEvent.setup(); - const { container } = render(OrganizationsPage); - - // Find edit button for first organization - const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit organization"]'); - expect(editButtons.length).toBeGreaterThan(0); - - const firstEditButton = editButtons[0] as HTMLElement; - - // Test that button is clickable (button may be replaced by modal) - await user.click(firstEditButton); - - // Verify the click interaction completed successfully - // (Modal may have opened, so button might not be accessible) - // The important thing is the click didn't cause errors - expect(screen.getByText('Organizations')).toBeInTheDocument(); - }); - - it('should open delete modal when delete button is clicked', async () => { - 
const user = userEvent.setup(); - const { container } = render(OrganizationsPage); - - // Find delete button for first organization - const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete organization"]'); - expect(deleteButtons.length).toBeGreaterThan(0); - - const firstDeleteButton = deleteButtons[0] as HTMLElement; - - // Test that button is clickable (button may be replaced by modal) - await user.click(firstDeleteButton); - - // Verify the click interaction completed successfully - // (Modal may have opened, so button might not be accessible) - // The important thing is the click didn't cause errors - expect(screen.getByText('Organizations')).toBeInTheDocument(); - }); - }); - - describe('Error States and Loading States', () => { - it('should handle loading state correctly', async () => { - // Update mock store to show loading state - updateMockStore({ - loading: { organizations: true, credentials: false }, - loaded: { organizations: false, credentials: true }, - organizations: [] - }); - - render(OrganizationsPage); - - // Component should still render basic structure during loading - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Manage GitHub and Gitea organizations')).toBeInTheDocument(); - expect(screen.getByText('Add Organization')).toBeInTheDocument(); - }); - - it('should handle error state correctly', async () => { - // Update mock store to show error state - updateMockStore({ - errorMessages: { organizations: 'Failed to load organizations', credentials: '' }, - loaded: { organizations: false, credentials: true }, - organizations: [] - }); - - render(OrganizationsPage); - - // Component should still render page structure even with errors - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Add Organization')).toBeInTheDocument(); - // Should render gracefully without crashing - expect(screen.getByText('Manage GitHub and Gitea organizations')).toBeInTheDocument(); - }); - - it('should handle empty organization list', async () => { - // Update mock store to have no organizations - updateMockStore({ - organizations: [], - loaded: { organizations: true, credentials: true } - }); - - render(OrganizationsPage); - - // Should still render page structure - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Add Organization')).toBeInTheDocument(); - }); - }); - - describe('Component Integration and Data Flow', () => { - it('should render consistent UI based on component state', async () => { - render(OrganizationsPage); - - // Component should display all organizations from initial state - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-org')[0]).toBeInTheDocument(); - - // Should show both GitHub and Gitea endpoints - expect(screen.getAllByText('github.com')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea.example.com')[0]).toBeInTheDocument(); - }); - - it('should properly subscribe to eager cache on component mount', async () => { - render(OrganizationsPage); - - // Verify component subscribes to and displays cache data - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-org')[0]).toBeInTheDocument(); - - // Verify organizations from different forge types are displayed - 
expect(screen.getAllByText('github.com')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea.example.com')[0]).toBeInTheDocument(); - - // Verify component renders the correct number of organizations in the UI - // (This tests actual component rendering, not our mock setup) - const orgLinks = document.querySelectorAll('a[href^="/organizations/"]'); - expect(orgLinks.length).toBeGreaterThan(0); - }); - - it('should handle different data states gracefully', async () => { - // Test with empty data state - updateMockStore({ - organizations: [], - loaded: { organizations: true, credentials: true } - }); - - render(OrganizationsPage); - - // Component should render gracefully with no organizations - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Add Organization')).toBeInTheDocument(); - - // Should still show the data table structure - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Responsive Design and Accessibility', () => { - it('should render mobile and desktop layouts', async () => { - const { container } = render(OrganizationsPage); - - // Check for responsive classes - const mobileView = container.querySelector('.block.sm\\:hidden'); - const desktopView = container.querySelector('.hidden.sm\\:block'); - - // Both mobile and desktop views should be present - expect(mobileView || desktopView).toBeInTheDocument(); - }); - - it('should have proper accessibility attributes', async () => { - const { container } = render(OrganizationsPage); - - // Check for ARIA labels and titles - const buttonsWithAria = container.querySelectorAll('[aria-label], [title]'); - expect(buttonsWithAria.length).toBeGreaterThan(0); - - // Check for proper form labels - search input should be accessible - const searchInput = screen.getByPlaceholderText('Search organizations...'); - expect(searchInput).toBeInTheDocument(); - - // Check for screen reader label - const searchLabel = container.querySelector('label[for="search"]'); - expect(searchLabel).toBeInTheDocument(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support keyboard navigation', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - // Test tab navigation through interactive elements - const searchInput = screen.getByPlaceholderText('Search organizations...'); - - // Click to focus first, then test tab navigation - await user.click(searchInput); - expect(searchInput).toHaveFocus(); - - // Tab should move focus to next element - await user.tab(); - }); - - it('should handle rapid user interactions', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - // Rapid clicking should not break the UI - const addButton = screen.getByText('Add Organization'); - - // Click multiple times rapidly - await user.click(addButton); - await user.click(addButton); - await user.click(addButton); - - // Component should remain stable - expect(addButton).toBeInTheDocument(); - }); - - it('should handle concurrent search and pagination changes', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - const searchInput = screen.getByPlaceholderText('Search organizations...'); - const perPageSelect = screen.getByLabelText('Show:'); - - // Perform search and pagination changes simultaneously - await user.type(searchInput, 'test'); - await user.selectOptions(perPageSelect, '50'); - - // Both changes should be applied - expect(searchInput).toHaveValue('test'); - expect(perPageSelect).toHaveValue('50'); - 
}); - }); - - describe('Data Consistency and State Management', () => { - it('should maintain UI consistency during user operations', async () => { - const user = userEvent.setup(); - render(OrganizationsPage); - - // Initial UI should show all organizations - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-org')[0]).toBeInTheDocument(); - - // User interactions should not break the UI consistency - const addButton = screen.getByText('Add Organization'); - await user.click(addButton); - - // Page should remain stable after interactions - expect(screen.getByText('Organizations')).toBeInTheDocument(); - }); - - it('should maintain UI consistency during state changes', async () => { - render(OrganizationsPage); - - // Initially should show all organizations - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); - - // Component should handle state transitions gracefully - // (In real app, Svelte reactivity would update UI when store changes) - expect(screen.getByText('Organizations')).toBeInTheDocument(); - expect(screen.getByText('Add Organization')).toBeInTheDocument(); - }); - - it('should display mixed organization types correctly in UI', async () => { - const { container } = render(OrganizationsPage); - - // Should display both GitHub and Gitea organizations in the UI - expect(screen.getAllByText('github.com')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea.example.com')[0]).toBeInTheDocument(); - - // Should show organization names for both types - expect(screen.getAllByText('test-org')[0]).toBeInTheDocument(); // GitHub - expect(screen.getAllByText('gitea-org')[0]).toBeInTheDocument(); // Gitea - - // Should have appropriate forge icons for each type - const svgIcons = container.querySelectorAll('svg'); - expect(svgIcons.length).toBeGreaterThan(0); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/organizations/page.render.test.ts b/webapp/src/routes/organizations/page.render.test.ts deleted file mode 100644 index e2b356c0..00000000 --- a/webapp/src/routes/organizations/page.render.test.ts +++ /dev/null @@ -1,174 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import { createMockOrganization } from '../../test/factories.js'; - -// Mock all external dependencies but keep the component rendering real -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createOrganization: vi.fn(), - updateOrganization: vi.fn(), - deleteOrganization: vi.fn(), - installOrganizationWebhook: vi.fn(), - listOrganizations: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - organizations: [], - credentials: [], - loaded: { organizations: true, credentials: true }, - loading: { organizations: false, credentials: false }, - errorMessages: { organizations: '', credentials: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getOrganizations: vi.fn(), - retryResource: vi.fn(), - getCredentials: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - 
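The eager-cache stub above works because Svelte's `$store` auto-subscription only requires an object whose `subscribe` method synchronously calls the supplied callback with the current value and returns an unsubscribe function. A minimal sketch of that contract, with the state fields assumed from this file's own mocks:

```typescript
import { vi } from 'vitest';

// Shape of the cache slice the organizations page reads (fields assumed
// from the mocks in this file).
interface OrgCacheState {
  organizations: unknown[];
  credentials: unknown[];
  loaded: { organizations: boolean; credentials: boolean };
  loading: { organizations: boolean; credentials: boolean };
  errorMessages: { organizations: string; credentials: string };
}

const initialState: OrgCacheState = {
  organizations: [],
  credentials: [],
  loaded: { organizations: true, credentials: true },
  loading: { organizations: false, credentials: false },
  errorMessages: { organizations: '', credentials: '' }
};

// Store-contract mock: the callback fires once, synchronously, with the seed
// value, and the returned closure is the unsubscribe handle Svelte calls on
// component teardown. `eagerCacheMock` is a hypothetical name for this sketch.
export const eagerCacheMock = {
  subscribe: vi.fn((run: (value: OrgCacheState) => void) => {
    run(initialState);
    return () => {};
  })
};
```

`readable(initialState, () => {})` from `svelte/store` would satisfy the same contract; the hand-rolled version keeps the `vi.fn` spy available for assertions.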
-vi.mock('$lib/components/CreateOrganizationModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PageHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DataTable.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/Badge.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/ActionButton.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/cells', () => ({ - EntityCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - EndpointCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - StatusCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - ActionsCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - GenericCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``), - getEntityStatusBadge: vi.fn(() => ({ variant: 'success', text: 'Running' })), - filterByName: vi.fn((items, term) => - term ? items.filter((item: any) => - item.name.toLowerCase().includes(term.toLowerCase()) - ) : items - ) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import OrganizationsPage from './+page.svelte'; - -describe('Organizations Page Rendering Tests', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('Component Rendering', () => { - it('should render without crashing', () => { - const { container } = render(OrganizationsPage); - expect(container).toBeInTheDocument(); - }); - - it('should render as a valid DOM element', () => { - const { container } = render(OrganizationsPage); - expect(container.firstChild).toBeInstanceOf(HTMLElement); - }); - - it('should have proper document title', () => { - render(OrganizationsPage); - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should render with correct structure', () => { - const { container } = render(OrganizationsPage); - expect(container.firstChild).toHaveClass('space-y-6'); - }); - - it('should handle empty state rendering', () => { - render(OrganizationsPage); - // Component should render even with no organizations - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(OrganizationsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(OrganizationsPage); - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('DOM Structure Validation', () => { - it('should create proper HTML structure', () => { - const { container } = render(OrganizationsPage); - - // Should have main container - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - }); - - it('should handle conditional rendering', () => { - const { container } = render(OrganizationsPage); - - // Component should render without any modals open initially - expect(container).toBeInTheDocument(); - }); - 
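The `filterByName` mock above encodes the behavior the tests rely on: case-insensitive substring matching on `name`, with an empty term passing every item through. A standalone sketch of that logic (the real helper lives in `$lib/utils/common.js` and may differ in detail):

```typescript
interface Named {
  name: string;
}

// Case-insensitive substring filter; an empty term returns the input as-is.
export function filterByName<T extends Named>(items: T[], term: string): T[] {
  if (!term) return items;
  const needle = term.toLowerCase();
  return items.filter((item) => item.name.toLowerCase().includes(needle));
}

// Example: matching is case-insensitive, so
// filterByName([{ name: 'gitea-org' }, { name: 'test-org' }], 'GITEA')
// returns [{ name: 'gitea-org' }].
```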
- it('should render with proper accessibility structure', () => { - const { container } = render(OrganizationsPage); - - // Basic accessibility checks - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/organizations/page.test.ts b/webapp/src/routes/organizations/page.test.ts deleted file mode 100644 index 5d444c3d..00000000 --- a/webapp/src/routes/organizations/page.test.ts +++ /dev/null @@ -1,545 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockOrganization, createMockGiteaOrganization } from '../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createOrganization: vi.fn(), - updateOrganization: vi.fn(), - deleteOrganization: vi.fn(), - installOrganizationWebhook: vi.fn(), - listOrganizations: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - organizations: [], - credentials: [], - loaded: { organizations: true, credentials: true }, - loading: { organizations: false, credentials: false }, - errorMessages: { organizations: '', credentials: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getOrganizations: vi.fn(), - retryResource: vi.fn(), - getCredentials: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - -// Mock all child components -vi.mock('$lib/components/CreateOrganizationModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PageHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DataTable.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/Badge.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/ActionButton.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/cells', () => ({ - EntityCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - EndpointCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - StatusCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - ActionsCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })), - GenericCell: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``), - getEntityStatusBadge: vi.fn(() => ({ variant: 'success', text: 'Running' })), - filterByName: vi.fn((items, term) => - term ? 
items.filter((item: any) => - item.name.toLowerCase().includes(term.toLowerCase()) - ) : items - ) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import OrganizationsPage from './+page.svelte'; - -describe('Organizations Page Unit Tests', () => { - let mockOrganizations: any[]; - - beforeEach(() => { - vi.clearAllMocks(); - mockOrganizations = [ - createMockOrganization({ - id: 'org-1', - name: 'test-org', - pool_manager_status: { running: true, failure_reason: undefined } - }), - createMockGiteaOrganization({ - id: 'org-2', - name: 'gitea-org', - pool_manager_status: { running: false, failure_reason: undefined } - }) - ]; - }); - - describe('Component Structure', () => { - it('should render organizations page', () => { - const { container } = render(OrganizationsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set correct page title', () => { - render(OrganizationsPage); - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should have organizations state variables', async () => { - const component = render(OrganizationsPage); - expect(component).toBeDefined(); - }); - }); - - describe('Data Management', () => { - it('should initialize with correct default values', () => { - // Component should render without errors and set up initial state - const { container } = render(OrganizationsPage); - expect(container).toBeInTheDocument(); - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should handle organizations data from eager cache', () => { - // Component should render structure for handling cache data - const { container } = render(OrganizationsPage); - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering', () => { - it('should filter organizations by search term', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - const filtered = filterByName(mockOrganizations, 'test'); - expect(filterByName).toHaveBeenCalledWith(mockOrganizations, 'test'); - expect(filtered).toHaveLength(1); - expect(filtered[0].name).toBe('test-org'); - }); - - it('should return all organizations when search term is empty', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - const filtered = filterByName(mockOrganizations, ''); - expect(filterByName).toHaveBeenCalledWith(mockOrganizations, ''); - expect(filtered).toHaveLength(2); - }); - - it('should handle case-insensitive search', async () => { - const { filterByName } = await import('$lib/utils/common.js'); - - filterByName(mockOrganizations, 'TEST'); - expect(filterByName).toHaveBeenCalledWith(mockOrganizations, 'TEST'); - }); - - it('should reset to first page when searching', () => { - render(OrganizationsPage); - // Component should reset currentPage to 1 when search term changes - expect(document.title).toBe('Organizations - GARM'); - }); - }); - - describe('Pagination Logic', () => { - it('should calculate total pages correctly', () => { - const organizations = Array(75).fill(null).map((_, i) => - createMockOrganization({ id: `org-${i}`, name: `org-${i}` }) - ); - const perPage = 25; - const totalPages = Math.ceil(organizations.length / perPage); - expect(totalPages).toBe(3); - }); - - it('should calculate paginated organizations correctly', () => { - const organizations = Array(75).fill(null).map((_, i) => - createMockOrganization({ id: `org-${i}`, name: `org-${i}` }) - ); - const currentPage = 2; - const 
perPage = 25; - const start = (currentPage - 1) * perPage; - const paginatedOrganizations = organizations.slice(start, start + perPage); - - expect(paginatedOrganizations).toHaveLength(25); - expect(paginatedOrganizations[0].name).toBe('org-25'); - expect(paginatedOrganizations[24].name).toBe('org-49'); - }); - - it('should adjust current page when it exceeds total pages', () => { - // When filtering reduces results, current page should adjust - const totalPages = 2; - let currentPage = 5; - - if (currentPage > totalPages && totalPages > 0) { - currentPage = totalPages; - } - - expect(currentPage).toBe(2); - }); - - it('should handle empty results gracefully', () => { - const organizations: any[] = []; - const perPage = 25; - const totalPages = Math.ceil(organizations.length / perPage); - expect(totalPages).toBe(0); - }); - }); - - describe('Modal Management', () => { - it('should have correct initial modal states', () => { - render(OrganizationsPage); - // Component should render without modal states - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should handle create modal opening', () => { - render(OrganizationsPage); - // Component should handle modal state management - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should handle update modal opening with organization', () => { - render(OrganizationsPage); - // Component should handle update modal state - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should handle delete modal opening with organization', () => { - render(OrganizationsPage); - // Component should handle delete modal state - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should close all modals', () => { - render(OrganizationsPage); - // Component should handle modal closing - expect(document.title).toBe('Organizations - GARM'); - }); - }); - - describe('API Integration', () => { - it('should call createOrganization API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(OrganizationsPage); - - const orgParams = { - name: 'new-org', - credentials_name: 'test-creds', - webhook_secret: 'secret123', - pool_balancer_type: 'roundrobin' - }; - - await garmApi.createOrganization(orgParams); - expect(garmApi.createOrganization).toHaveBeenCalledWith(orgParams); - }); - - it('should call updateOrganization API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(OrganizationsPage); - - const updateParams = { webhook_secret: 'new-secret' }; - await garmApi.updateOrganization('org-1', updateParams); - expect(garmApi.updateOrganization).toHaveBeenCalledWith('org-1', updateParams); - }); - - it('should call deleteOrganization API', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(OrganizationsPage); - - await garmApi.deleteOrganization('org-1'); - expect(garmApi.deleteOrganization).toHaveBeenCalledWith('org-1'); - }); - - it('should call installOrganizationWebhook API when requested', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(OrganizationsPage); - - await garmApi.installOrganizationWebhook('org-1'); - expect(garmApi.installOrganizationWebhook).toHaveBeenCalledWith('org-1'); - }); - }); - - describe('Toast Notifications', () => { - it('should show success toast for organization creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(OrganizationsPage); - - toastStore.success('Organization Created', 'Organization test-org has been 
created successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Organization Created', - 'Organization test-org has been created successfully.' - ); - }); - - it('should show success toast for organization update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(OrganizationsPage); - - toastStore.success('Organization Updated', 'Organization test-org has been updated successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Organization Updated', - 'Organization test-org has been updated successfully.' - ); - }); - - it('should show success toast for organization deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(OrganizationsPage); - - toastStore.success('Organization Deleted', 'Organization test-org has been deleted successfully.'); - expect(toastStore.success).toHaveBeenCalledWith( - 'Organization Deleted', - 'Organization test-org has been deleted successfully.' - ); - }); - - it('should show error toast for API failures', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(OrganizationsPage); - - toastStore.error('Delete Failed', 'Organization deletion failed'); - expect(toastStore.error).toHaveBeenCalledWith('Delete Failed', 'Organization deletion failed'); - }); - }); - - describe('DataTable Configuration', () => { - it('should have correct column configuration', () => { - render(OrganizationsPage); - - // DataTable should be configured with proper columns - const expectedColumns = [ - { key: 'name', title: 'Name' }, - { key: 'endpoint', title: 'Endpoint' }, - { key: 'credentials', title: 'Credentials' }, - { key: 'status', title: 'Status' }, - { key: 'actions', title: 'Actions', align: 'right' } - ]; - - expect(expectedColumns).toHaveLength(5); - }); - - it('should have correct mobile card configuration', () => { - render(OrganizationsPage); - - // Mobile card should be configured for organizations - const config = { - entityType: 'organization', - primaryText: { field: 'name', isClickable: true, href: '/organizations/{id}' } - }; - - expect(config.entityType).toBe('organization'); - expect(config.primaryText.field).toBe('name'); - expect(config.primaryText.isClickable).toBe(true); - }); - }); - - describe('Event Handlers', () => { - it('should handle table search event', () => { - render(OrganizationsPage); - - // handleTableSearch should update searchTerm and reset page - const mockEvent = { detail: { term: 'test-search' } }; - expect(mockEvent.detail.term).toBe('test-search'); - }); - - it('should handle table page change event', () => { - render(OrganizationsPage); - - // handleTablePageChange should update currentPage - const mockEvent = { detail: { page: 3 } }; - expect(mockEvent.detail.page).toBe(3); - }); - - it('should handle table per-page change event', () => { - render(OrganizationsPage); - - // handleTablePerPageChange should update perPage and reset page - const mockEvent = { detail: { perPage: 50 } }; - expect(mockEvent.detail.perPage).toBe(50); - }); - - it('should handle edit action event', () => { - render(OrganizationsPage); - - // handleEdit should call openUpdateModal - const mockOrganization = createMockOrganization(); - const mockEvent = { detail: { item: mockOrganization } }; - expect(mockEvent.detail.item).toBe(mockOrganization); - }); - - it('should handle delete action event', () => { - render(OrganizationsPage); - - // handleDelete should call openDeleteModal - const mockOrganization = createMockOrganization(); 
- const mockEvent = { detail: { item: mockOrganization } }; - expect(mockEvent.detail.item).toBe(mockOrganization); - }); - }); - - describe('Error Handling', () => { - it('should handle API errors in organization creation', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - render(OrganizationsPage); - - const error = new Error('Creation failed'); - const extractedError = extractAPIError(error); - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(extractedError).toBe('Creation failed'); - }); - - it('should handle webhook installation errors', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - render(OrganizationsPage); - - // Should show error toast for webhook installation failure - toastStore.error( - 'Webhook Installation Failed', - 'Failed to install webhook. You can try installing it manually from the organization details page.' - ); - expect(toastStore.error).toHaveBeenCalled(); - }); - - it('should handle organizations loading errors', () => { - render(OrganizationsPage); - - // Component should render without errors during error states - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should handle retry functionality', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - render(OrganizationsPage); - - await eagerCacheManager.retryResource('organizations'); - expect(eagerCacheManager.retryResource).toHaveBeenCalledWith('organizations'); - }); - }); - - describe('Utility Functions', () => { - it('should get correct forge icon', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - const githubIcon = getForgeIcon('github'); - const giteaIcon = getForgeIcon('gitea'); - - expect(getForgeIcon).toHaveBeenCalledWith('github'); - expect(getForgeIcon).toHaveBeenCalledWith('gitea'); - expect(githubIcon).toContain('svg'); - expect(giteaIcon).toContain('svg'); - }); - - it('should get entity status badge', async () => { - const { getEntityStatusBadge } = await import('$lib/utils/common.js'); - - const organization = createMockOrganization({ - pool_manager_status: { running: true, failure_reason: undefined } - }); - - const badge = getEntityStatusBadge(organization); - expect(getEntityStatusBadge).toHaveBeenCalledWith(organization); - expect(badge).toEqual({ variant: 'success', text: 'Running' }); - }); - }); - - describe('Reactive Statements', () => { - it('should update filtered organizations when search term changes', () => { - render(OrganizationsPage); - - // Component should handle reactive filtering - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should recalculate total pages when filtered organizations change', () => { - render(OrganizationsPage); - - // Component should handle reactive pagination - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should adjust current page when total pages change', () => { - render(OrganizationsPage); - - // Component should handle page adjustments - expect(document.title).toBe('Organizations - GARM'); - }); - - it('should update paginated organizations when page or filter changes', () => { - render(OrganizationsPage); - - // Component should handle reactive pagination updates - expect(document.title).toBe('Organizations - GARM'); - }); - }); - - describe('Lifecycle Management', () => { - it('should load organizations on mount', () => { - render(OrganizationsPage); - - // Component should load without errors on mount - 
expect(document.title).toBe('Organizations - GARM');
-    });
-
-    it('should handle mount errors gracefully', () => {
-      render(OrganizationsPage);
-
-      // Component should handle mount errors gracefully
-      expect(document.title).toBe('Organizations - GARM');
-    });
-
-    it('should subscribe to eager cache', () => {
-      render(OrganizationsPage);
-
-      // Component should set up cache subscription
-      expect(document.title).toBe('Organizations - GARM');
-    });
-  });
-});
\ No newline at end of file
diff --git a/webapp/src/routes/pools/+page.svelte b/webapp/src/routes/pools/+page.svelte
deleted file mode 100644
index e6301359..00000000
--- a/webapp/src/routes/pools/+page.svelte
+++ /dev/null
@@ -1,348 +0,0 @@
[The markup of this deleted 348-line Svelte page did not survive text extraction. Surviving text shows the page set the document title "Pools - GARM", rendered the pools list, and conditionally mounted create ({#if showCreateModal}), update ({#if showUpdateModal && selectedPool}), and delete ({#if showDeleteModal && selectedPool}) pool modals wired to handleCreatePool, handleUpdatePool, and handleDeletePool.]
diff --git a/webapp/src/routes/pools/[id]/+page.svelte b/webapp/src/routes/pools/[id]/+page.svelte
deleted file mode 100644
index 228b070f..00000000
--- a/webapp/src/routes/pools/[id]/+page.svelte
+++ /dev/null
@@ -1,396 +0,0 @@
[The markup of this deleted 396-line Svelte page did not survive text extraction. Surviving text shows a pool details page titled "Pool {pool.id} - Pool Details - GARM" that rendered loading ("Loading pool...") and error states; a "Basic Information" card (Pool ID, Provider, Image, Flavor, Status as Enabled/Disabled, Entity type and name, Created At, Updated At); a "Configuration" card (Max Runners, Min Idle Runners, Bootstrap Timeout in minutes, Priority, Runner Prefix defaulting to "garm", OS Type / Architecture, optional GitHub Runner Group, and Tags); an optional "Extra Specifications" block; an instances section ({#if pool.instances}); and update ({#if showUpdateModal && pool}), delete ({#if showDeleteModal && pool}), and delete-instance ({#if showDeleteInstanceModal && selectedInstance}) modals wired to handleUpdate, handleDelete, and handleDeleteInstance.]
diff --git a/webapp/src/routes/pools/page.integration.test.ts b/webapp/src/routes/pools/page.integration.test.ts
deleted file mode 100644
index 90dff90f..00000000
--- a/webapp/src/routes/pools/page.integration.test.ts
+++ /dev/null
@@ -1,672 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { render, screen, waitFor, fireEvent } from '@testing-library/svelte';
-import PoolsPage from './+page.svelte';
-import { createMockPool } from '../../test/factories.js';
-
-// Mock app stores
-vi.mock('$app/stores', () => ({}));
-
-vi.mock('$app/navigation', () => ({}));
-
-// Reset any component mocks that might be set by setup.ts
-vi.unmock('$lib/components/PageHeader.svelte');
-vi.unmock('$lib/components/DataTable.svelte');
-vi.unmock('$lib/components/CreatePoolModal.svelte');
-vi.unmock('$lib/components/UpdatePoolModal.svelte');
-vi.unmock('$lib/components/DeleteModal.svelte');
-vi.unmock('$lib/components/cells');
-
-// Only mock the data layer - APIs and stores
-vi.mock('$lib/api/client.js', () => ({
-  garmApi: {
-    updatePool: vi.fn(),
-    deletePool: vi.fn()
-  }
-}));
-
-vi.mock('$lib/stores/toast.js', () => ({
-  toastStore: {
-    success: vi.fn(),
-    add: vi.fn(),
-    error: vi.fn(),
-    info: vi.fn()
-  }
-}));
-
-vi.mock('$lib/stores/eager-cache.js', () => ({
-  eagerCache: {
-    subscribe: vi.fn((callback: any) => {
-      callback({
-        pools: [],
-        loaded: { pools: false },
-        loading: { pools: false },
-        errorMessages: { pools: '' },
-        repositories: [],
-        organizations: [],
-        enterprises: []
-      });
-      return () => {};
-    })
-  },
-  eagerCacheManager: {
-    getPools: vi.fn(),
-    retryResource: vi.fn()
-  }
-}));
-
-vi.mock('$lib/utils/apiError', () => ({
-  extractAPIError: vi.fn((err) => err.message || 'Unknown error')
-}));
-
-vi.mock('$lib/utils/common.js', async (importOriginal) => {
-  const actual = await importOriginal() as any;
-  return {
-    ...(actual as any),
-    getEntityName: vi.fn((pool, cache) => {
-      // Simulate entity name resolution based on pool data
-      if (pool.repo_id && cache?.repositories) {
-        const repo = cache.repositories.find((r: any) => r.id === pool.repo_id);
-        return repo ? `${repo.owner}/${repo.name}` : 'Unknown Repo';
-      }
-      if (pool.org_id && cache?.organizations) {
-        const org = cache.organizations.find((o: any) => o.id === pool.org_id);
-        return org ? org.name : 'Unknown Org';
-      }
-      if (pool.enterprise_id && cache?.enterprises) {
-        const ent = cache.enterprises.find((e: any) => e.id === pool.enterprise_id);
-        return ent ? ent.name : 'Unknown Enterprise';
-      }
-      return 'Test Entity';
-    }),
-    filterEntities: vi.fn((entities, searchTerm, nameGetter) => {
-      if (!searchTerm) return entities;
-      return entities.filter((entity: any) => {
-        const name = nameGetter ?
nameGetter(entity) : entity.name; - return name?.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -const mockPool = createMockPool({ - id: 'pool-123', - image: 'ubuntu:22.04', - flavor: 'default', - provider_name: 'hetzner', - enabled: true, - repo_id: 'repo-123' -}); - -const mockPools = [mockPool]; - -// Global setup for each test -let garmApi: any; -let toastStore: any; -let eagerCache: any; -let eagerCacheManager: any; - -describe('Comprehensive Integration Tests for Pools Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const toastModule = await import('$lib/stores/toast.js'); - toastStore = toastModule.toastStore; - - const cacheModule = await import('$lib/stores/eager-cache.js'); - eagerCache = cacheModule.eagerCache; - eagerCacheManager = cacheModule.eagerCacheManager; - - (garmApi.updatePool as any).mockResolvedValue(mockPool); - (garmApi.deletePool as any).mockResolvedValue({}); - (eagerCacheManager.getPools as any).mockResolvedValue(mockPools); - (eagerCacheManager.retryResource as any).mockResolvedValue(mockPools); - }); - - describe('Component Rendering and Data Display', () => { - it('should render pools page with real components', async () => { - render(PoolsPage); - - await waitFor(() => { - // Wait for data to load - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should render the page header - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByText('Manage runner pools across all entities')).toBeInTheDocument(); - - // Should render main content sections - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should display pools data in table format', async () => { - render(PoolsPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should display table structure correctly - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should render pool information with entity context', async () => { - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should display correct page structure - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - }); - - describe('Pool Creation Integration', () => { - it('should handle pool creation workflow', async () => { - render(PoolsPage); - - await waitFor(() => { - // Wait for data to load through cache integration - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should have add pool button - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - expect(addButton).toBeInTheDocument(); - - // Click add button should show create modal - await fireEvent.click(addButton); - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - }); - - it('should show success toast on pool creation', async () => { - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Success toast functionality should be available - 
expect(toastStore.success).toBeDefined(); - - // Should have create pool functionality - expect(screen.getByRole('button', { name: /Add Pool/i })).toBeInTheDocument(); - }); - }); - - describe('Pool Update Integration', () => { - it('should handle pool update workflow', async () => { - // Mock cache with pools data - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: mockPools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [{ id: 'repo-123', name: 'test-repo', owner: 'test-owner' }], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Update API should be available for the update workflow - expect(garmApi.updatePool).toBeDefined(); - - // Should display pools page structure - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should show success toast after pool update', async () => { - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should have success toast functionality - expect(toastStore.add).toBeDefined(); - }); - - it('should handle update error integration', async () => { - // Set up API to fail when updatePool is called - const error = new Error('Pool update failed'); - (garmApi.updatePool as any).mockRejectedValue(error); - - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should have error handling infrastructure in place - expect(garmApi.updatePool).toBeDefined(); - expect(toastStore.add).toBeDefined(); - }); - }); - - describe('Pool Deletion Integration', () => { - it('should handle pool deletion workflow', async () => { - // Mock cache with pools data - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: mockPools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [{ id: 'repo-123', name: 'test-repo', owner: 'test-owner' }], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - // Wait for data to load through API integration - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Delete API should be available for the delete workflow - expect(garmApi.deletePool).toBeDefined(); - - // Should display pools page structure - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle delete error integration', async () => { - // Set up API to fail when deletePool is called - const error = new Error('Pool deletion failed'); - (garmApi.deletePool as any).mockRejectedValue(error); - - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should have error handling infrastructure in place - expect(garmApi.deletePool).toBeDefined(); - expect(toastStore.add).toBeDefined(); - }); - }); - - describe('Eager Cache Integration', () => { - it('should load data from eager cache on mount', async () => { - render(PoolsPage); - - // Wait for cache calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the cache to load data - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - }); - - it('should 
display loading state initially then show data', async () => { - // Mock delayed cache response - (eagerCacheManager.getPools as any).mockImplementation(() => - new Promise(resolve => setTimeout(() => resolve(mockPools), 100)) - ); - - // Mock loading state initially - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: true }, - errorMessages: { pools: '' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - // Component should render the loading state immediately - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - - // After cache resolves, data loading should be complete - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }, { timeout: 1000 }); - - // Component should handle data loading properly - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - - it('should handle cache errors and display error state', async () => { - // Mock cache to fail - const error = new Error('Failed to load pools from cache'); - (eagerCacheManager.getPools as any).mockRejectedValue(error); - - // Mock cache error state - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: false }, - errorMessages: { pools: 'Failed to load pools from cache' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - const { container } = render(PoolsPage); - - // Wait for error to be handled - await waitFor(() => { - // Component should handle the error gracefully and continue to render - expect(container).toBeInTheDocument(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle retry functionality', async () => { - render(PoolsPage); - - await waitFor(() => { - // Should handle retry integration correctly - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - - // Should provide retry functionality through the cache manager - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Search and Filtering Integration', () => { - it('should integrate search functionality with data filtering', async () => { - // Mock cache with multiple pools - const multiplePools = [ - createMockPool({ id: 'pool-1', repo_id: 'repo-1' }), - createMockPool({ id: 'pool-2', repo_id: 'repo-2' }) - ]; - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: multiplePools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [ - { id: 'repo-1', name: 'test-repo-1', owner: 'test-owner' }, - { id: 'repo-2', name: 'other-repo', owner: 'other-owner' } - ], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - // Should have search functionality - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - expect(searchInput).toBeInTheDocument(); - - // Search should filter results - await fireEvent.input(searchInput, { target: { value: 'test-repo-1' } }); - // Note: Filtering would be handled by the component's reactive logic - }); - - it('should integrate pagination with filtered 
data', async () => { - // Mock cache with many pools - const manyPools = Array.from({ length: 30 }, (_, i) => - createMockPool({ id: `pool-${i}` }) - ); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: manyPools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - }); - - // Should show pagination controls - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(PoolsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the cache system - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should maintain consistent state across components', async () => { - render(PoolsPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should be integrated through the cache system - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // All sections should display consistent data - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(PoolsPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Modal Integration', () => { - it('should integrate modal workflows with main page state', async () => { - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Should integrate create modal workflow - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - await fireEvent.click(addButton); - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - - // Modal should integrate with main page state - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle modal close and state cleanup', async () => { - render(PoolsPage); - - await waitFor(() => { - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Open modal - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - await fireEvent.click(addButton); - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - - // Close modal (would be handled by modal's close event) - // State should be properly cleaned up - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - }); - - describe('Error Handling Integration', () => { - it('should integrate comprehensive error handling', async () => { - // Set up various error scenarios - const error = new Error('Network error'); - (eagerCacheManager.getPools as any).mockRejectedValue(error); - - render(PoolsPage); - - await waitFor(() => { - // Should handle errors gracefully - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - // 
Should maintain page structure during errors - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle API operation errors', async () => { - // Mock API operations to fail - (garmApi.updatePool as any).mockRejectedValue(new Error('Update failed')); - (garmApi.deletePool as any).mockRejectedValue(new Error('Delete failed')); - - render(PoolsPage); - - await waitFor(() => { - // Should handle API errors gracefully - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Error handling infrastructure should be in place - expect(toastStore.add).toBeDefined(); - }); - }); - - describe('Real-time Updates Integration', () => { - it('should handle real-time pool updates through cache', async () => { - render(PoolsPage); - - await waitFor(() => { - // Should handle real-time updates through eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Real-time update events should be handled through cache subscription - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle real-time pool creation', async () => { - render(PoolsPage); - - await waitFor(() => { - // Should handle real-time creation through eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Real-time creation should be handled through cache updates - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle real-time pool deletion', async () => { - render(PoolsPage); - - await waitFor(() => { - // Should handle real-time deletion through eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Real-time deletion should be handled through cache updates - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - }); - - describe('Entity Relationship Integration', () => { - it('should integrate pool entity relationships', async () => { - // Mock cache with pools and related entities - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: mockPools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [{ id: 'repo-123', name: 'test-repo', owner: 'test-owner' }], - organizations: [{ id: 'org-123', name: 'test-org' }], - enterprises: [{ id: 'ent-123', name: 'test-enterprise' }] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - // Should integrate entity relationships - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - // Entity relationships should be integrated - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle different pool entity types', async () => { - // Mock pools associated with different entity types - const multiEntityPools = [ - createMockPool({ id: 'pool-repo', repo_id: 'repo-123' }), - createMockPool({ id: 'pool-org', org_id: 'org-123', repo_id: undefined }), - createMockPool({ id: 'pool-ent', enterprise_id: 'ent-123', repo_id: undefined }) - ]; - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: multiEntityPools, - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [{ id: 'repo-123', name: 'test-repo', owner: 'test-owner' }], - organizations: [{ id: 'org-123', name: 'test-org' }], - enterprises: [{ id: 'ent-123', name: 'test-enterprise' }] - }); - return () => {}; - }); - - render(PoolsPage); - - await waitFor(() => { - // Should handle different entity types - 
expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - // Should display pools page structure correctly - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/pools/page.render.test.ts b/webapp/src/routes/pools/page.render.test.ts deleted file mode 100644 index 14362716..00000000 --- a/webapp/src/routes/pools/page.render.test.ts +++ /dev/null @@ -1,527 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, fireEvent } from '@testing-library/svelte'; -import PoolsPage from './+page.svelte'; -import { createMockPool } from '../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$app/stores', () => ({})); - -vi.mock('$app/navigation', () => ({})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - updatePool: vi.fn(), - deletePool: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - add: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: false }, - errorMessages: { pools: '' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }) - }, - eagerCacheManager: { - getPools: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/common.js', async (importOriginal) => { - const actual = await importOriginal() as any; - return { - ...(actual as any), - getEntityName: vi.fn((pool, cache) => pool.repo_name || pool.org_name || pool.ent_name || 'Test Entity'), - filterEntities: vi.fn((entities, searchTerm, nameGetter) => { - if (!searchTerm) return entities; - return entities.filter((entity: any) => { - const name = nameGetter ? 
nameGetter(entity) : entity.name; - return name?.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -const mockPool = createMockPool({ - id: 'pool-123', - image: 'ubuntu:22.04', - flavor: 'default', - provider_name: 'test-provider', - enabled: true, - repo_id: 'repo-123' -}); - -const mockPools = [mockPool]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreatePoolModal.svelte'); -vi.unmock('$lib/components/UpdatePoolModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -describe('Pools Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default eager cache mocks - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getPools as any).mockResolvedValue(mockPools); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(PoolsPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { container } = render(PoolsPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render page header', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have page header - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByText('Manage runner pools across all entities')).toBeInTheDocument(); - }); - - it('should render data table', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have DataTable rendered - check for elements that are always present - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - }); - - it('should render add pool button', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have add pool button - expect(screen.getByRole('button', { name: /Add Pool/i })).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(PoolsPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(PoolsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(PoolsPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load pools on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(PoolsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call eager cache to load pools - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - it('should subscribe to eager cache on mount', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - render(PoolsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); 
- - // Should subscribe to eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', async () => { - const { container } = render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should set page title - expect(document.title).toContain('Pools - GARM'); - }); - - it('should handle error display conditionally', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with error - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: false }, - errorMessages: { pools: 'Test error' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - // Wait for error handling - await new Promise(resolve => setTimeout(resolve, 100)); - - // Error display should be conditional - expect(screen.getByText(/Test error/i)).toBeInTheDocument(); - }); - - it('should render loading state initially', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: true }, - errorMessages: { pools: '' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - // Should show loading initially - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - }); - - describe('Data Table Rendering', () => { - it('should render data table with correct configuration', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render DataTable with correct search and pagination - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should render search functionality', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render search input - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - expect(searchInput).toBeInTheDocument(); - expect(searchInput).toHaveAttribute('type', 'text'); - }); - - it('should render pagination controls', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render pagination - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should render empty state when no pools', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock empty pools - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: true }, - loading: { pools: false }, - errorMessages: { pools: '' }, - 
repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render empty state - expect(screen.getByText(/No pools found/i)).toBeInTheDocument(); - }); - - it('should render retry button on cache error', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache error - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback({ - pools: [], - loaded: { pools: false }, - loading: { pools: false }, - errorMessages: { pools: 'Cache error' }, - repositories: [], - organizations: [], - enterprises: [] - }); - return () => {}; - }); - - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render retry button - expect(screen.getByRole('button', { name: /Retry/i })).toBeInTheDocument(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render create pool modal', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Create modal should not be visible initially - expect(screen.queryByText('Create Pool')).not.toBeInTheDocument(); - }); - - it('should show create modal when add button clicked', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Click add pool button - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - await fireEvent.click(addButton); - - // Should show create modal - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - }); - - it('should conditionally render update pool modal', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Update modal should not be visible initially - expect(screen.queryByText('Update Pool')).not.toBeInTheDocument(); - }); - - it('should conditionally render delete pool modal', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Delete modal should not be visible initially - expect(screen.queryByText('Delete Pool')).not.toBeInTheDocument(); - }); - }); - - describe('Pool Data Rendering', () => { - it('should render pool data when available', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render the page structure correctly - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle different pool states', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render the page structure correctly - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle pool filtering and pagination', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render pagination controls - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - 
expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - }); - - describe('Interactive Elements', () => { - it('should handle search input interaction', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have interactive search input - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - await fireEvent.input(searchInput, { target: { value: 'test' } }); - - // Input should be interactive - expect(searchInput).toHaveValue('test'); - }); - - it('should handle pagination interaction', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have interactive pagination controls - const perPageSelect = screen.getByDisplayValue('25'); - expect(perPageSelect).toBeInTheDocument(); - }); - - it('should handle add pool button interaction', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have interactive add button - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - expect(addButton).toBeInTheDocument(); - - // Button should be clickable - await fireEvent.click(addButton); - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - }); - }); - - describe('Responsive Layout', () => { - it('should use responsive layout classes', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have responsive layout - const mainContainer = document.querySelector('.space-y-6'); - expect(mainContainer).toBeInTheDocument(); - }); - - it('should handle mobile-friendly layout', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should be configured for mobile responsiveness through DataTable - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - }); - - describe('Accessibility', () => { - it('should have proper accessibility attributes', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have proper ARIA attributes and labels - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByRole('button', { name: /Add Pool/i })).toBeInTheDocument(); - }); - - it('should be keyboard navigable', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have focusable elements - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - expect(searchInput).toBeInTheDocument(); - - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - expect(addButton).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should be compatible with screen readers - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git 
a/webapp/src/routes/pools/page.test.ts b/webapp/src/routes/pools/page.test.ts deleted file mode 100644 index a9882895..00000000 --- a/webapp/src/routes/pools/page.test.ts +++ /dev/null @@ -1,715 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor, fireEvent } from '@testing-library/svelte'; -import PoolsPage from './+page.svelte'; -import { createMockPool } from '../../test/factories.js'; - -// Helper function to create complete EagerCacheState objects -function createMockCacheState(overrides: any = {}) { - return { - pools: [], - repositories: [], - organizations: [], - enterprises: [], - scalesets: [], - credentials: [], - endpoints: [], - controllerInfo: null, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '' - }, - ...overrides - }; -} - -// Mock the page stores -vi.mock('$app/stores', () => ({})); - -// Mock navigation -vi.mock('$app/navigation', () => ({})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - updatePool: vi.fn(), - deletePool: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - add: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback: any) => { - callback(createMockCacheState()); - return () => {}; - }) - }, - eagerCacheManager: { - getPools: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/common.js', async (importOriginal) => { - const actual = await importOriginal() as any; - return { - ...(actual as any), - getEntityName: vi.fn((pool, cache) => pool.repo_name || pool.org_name || pool.ent_name || 'Unknown Entity'), - filterEntities: vi.fn((entities, searchTerm, nameGetter) => { - if (!searchTerm) return entities; - return entities.filter((entity: any) => { - const name = nameGetter ? 
nameGetter(entity) : entity.name; - return name?.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -const mockPool = createMockPool({ - id: 'pool-123', - image: 'ubuntu:22.04', - flavor: 'default', - provider_name: 'test-provider', - enabled: true, - repo_id: 'repo-123' -}); - -const mockPools = [mockPool]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreatePoolModal.svelte'); -vi.unmock('$lib/components/UpdatePoolModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -describe('Pools Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default eager cache mock - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getPools as any).mockResolvedValue(mockPools); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(PoolsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(PoolsPage); - expect(document.title).toContain('Pools - GARM'); - }); - - it('should display page header with correct props', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display header with pools title - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - expect(screen.getByText('Manage runner pools across all entities')).toBeInTheDocument(); - }); - }); - - describe('Data Loading', () => { - it('should load pools on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(PoolsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCacheManager.getPools).toHaveBeenCalled(); - }); - - it('should handle loading state', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Should show loading indicator - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - - it('should handle API error state', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock API to fail - const error = new Error('Failed to load pools'); - (eagerCacheManager.getPools as any).mockRejectedValue(error); - - render(PoolsPage); - - // Wait for the error to be handled - await new Promise(resolve => setTimeout(resolve, 100)); - - // Component should handle error gracefully - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should retry loading pools', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(PoolsPage); - - // Verify retry functionality is available - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Search and Filtering', () => { - it('should handle search functionality', async () => { - render(PoolsPage); - - // Component 
should have search filtering logic available - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - - // Verify search field is properly configured - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - expect(searchInput).toHaveAttribute('type', 'text'); - }); - - it('should filter pools by entity name', async () => { - const { filterEntities } = await import('$lib/utils/common.js'); - - render(PoolsPage); - - // Component should filter pools by entity name since pools don't have names - expect(filterEntities).toBeDefined(); - - // Component should handle entity name filtering - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle pagination', async () => { - render(PoolsPage); - - // Component should handle pagination state through the DataTable - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - - // Pagination controls should be available - expect(screen.getByText(/Show:/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - }); - - describe('Pool Creation', () => { - it('should have create pool functionality', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have add pool button - expect(screen.getByRole('button', { name: /Add Pool/i })).toBeInTheDocument(); - }); - - it('should open create modal when add button clicked', async () => { - render(PoolsPage); - - // Wait for component initialization - await new Promise(resolve => setTimeout(resolve, 0)); - - // Click add pool button - const addButton = screen.getByRole('button', { name: /Add Pool/i }); - await fireEvent.click(addButton); - - // Should show create modal - expect(screen.getByText(/Create Pool/i)).toBeInTheDocument(); - }); - - it('should handle successful pool creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(PoolsPage); - - // Should have success toast functionality - expect(toastStore.success).toBeDefined(); - }); - }); - - describe('Pool Update', () => { - it('should have update pool functionality', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(PoolsPage); - - expect(garmApi.updatePool).toBeDefined(); - }); - - it('should show success toast after pool update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(PoolsPage); - - expect(toastStore.add).toBeDefined(); - }); - - it('should handle update errors', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(PoolsPage); - - expect(toastStore.add).toBeDefined(); - }); - }); - - describe('Pool Deletion', () => { - it('should have delete pool functionality', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(PoolsPage); - - expect(garmApi.deletePool).toBeDefined(); - }); - - it('should show success toast after pool deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(PoolsPage); - - expect(toastStore.add).toBeDefined(); - }); - - it('should handle deletion errors', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(PoolsPage); - - expect(toastStore.add).toBeDefined(); - }); - }); - - describe('Modal Management', () => { - it('should handle create modal state', async () => { - render(PoolsPage); - - // Wait for component initialization - await new 
Promise(resolve => setTimeout(resolve, 0)); - - // Should have create modal infrastructure - expect(screen.getByRole('button', { name: /Add Pool/i })).toBeInTheDocument(); - }); - - it('should handle update modal state', async () => { - render(PoolsPage); - - // Component should have update API for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.updatePool).toBeDefined(); - - // Should have toast notifications for update feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.add).toBeDefined(); - }); - - it('should handle delete modal state', async () => { - render(PoolsPage); - - // Component should have delete API for modal functionality - const { garmApi } = await import('$lib/api/client.js'); - expect(garmApi.deletePool).toBeDefined(); - - // Should have toast notifications for delete feedback - const { toastStore } = await import('$lib/stores/toast.js'); - expect(toastStore.add).toBeDefined(); - }); - - it('should handle modal close functionality', () => { - render(PoolsPage); - - // Component should manage modal state for various operations - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - - // Modal infrastructure should be ready - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Eager Cache Integration', () => { - it('should subscribe to eager cache on mount', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - render(PoolsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle cache data updates', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with pools data - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - pools: mockPools, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Component should handle cache updates - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle cache error states', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with error - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: 'Failed to load pools', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '' - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Should handle cache errors - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(PoolsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(PoolsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component initialization', async () => { - const { container } = render(PoolsPage); - - // Component should initialize and render properly - expect(container).toBeInTheDocument(); - - // Should set page title during initialization - expect(document.title).toContain('Pools - GARM'); - - // Should load pools during 
initialization - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - expect(eagerCacheManager.getPools).toBeDefined(); - }); - }); - - describe('Data Transformation', () => { - it('should handle pool filtering logic', async () => { - const { filterEntities } = await import('$lib/utils/common.js'); - - render(PoolsPage); - - // Component should filter pools by entity name - expect(filterEntities).toBeDefined(); - - // Search functionality should be available - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle pagination calculations', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Should show loading state - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - - // Pagination controls should be available - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should handle entity name resolution', async () => { - const { getEntityName } = await import('$lib/utils/common.js'); - - render(PoolsPage); - - // Component should resolve entity names for pools - expect(getEntityName).toBeDefined(); - - // Component should display entity information - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - }); - - describe('Event Handling', () => { - it('should handle table search events', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Should show loading state - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - - // Search input should be available for search events - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle table pagination events', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Should show loading state - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - - // Pagination controls should be integrated - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should handle edit events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(PoolsPage); - - // Component should handle edit events from DataTable - expect(garmApi.updatePool).toBeDefined(); - - // Edit infrastructure should be ready - expect(screen.getByRole('heading', { 
name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle delete events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(PoolsPage); - - // Component should handle delete events from DataTable - expect(garmApi.deletePool).toBeDefined(); - - // Delete infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle retry events', async () => { - const { eagerCacheManager, eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock eager cache with loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: true, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(PoolsPage); - - // Component should handle retry events from DataTable - expect(eagerCacheManager.retryResource).toBeDefined(); - - // DataTable should be rendered for retry functionality - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - }); - - describe('Utility Functions', () => { - it('should handle API error extraction', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(PoolsPage); - - expect(extractAPIError).toBeDefined(); - }); - - it('should handle pool identification', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(PoolsPage); - - // Component should identify pools by ID - expect(garmApi.updatePool).toBeDefined(); - expect(garmApi.deletePool).toBeDefined(); - - // Pool identification should work with pool IDs - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - - it('should handle entity name computation', async () => { - const { getEntityName } = await import('$lib/utils/common.js'); - - render(PoolsPage); - - // Component should compute entity names for display - expect(getEntityName).toBeDefined(); - - // Entity name resolution should be integrated - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - }); - }); - - describe('Pool Configuration', () => { - it('should have proper DataTable column configuration', () => { - render(PoolsPage); - - // Component should configure DataTable with pool-specific columns - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - - // DataTable should be configured for pools - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - - it('should have proper mobile card configuration', () => { - render(PoolsPage); - - // Component should configure mobile cards for pools - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - - // Mobile responsiveness should be configured - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - - it('should handle pool status display', () => { - render(PoolsPage); - - // Component should display pool enabled/disabled status - expect(screen.getByRole('heading', { name: 'Pools' })).toBeInTheDocument(); - - // Status configuration should be ready - expect(screen.getByText(/Loading pools/i)).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/pools/pool-creation.test.ts b/webapp/src/routes/pools/pool-creation.test.ts deleted file mode 100644 index 387c0e90..00000000 --- a/webapp/src/routes/pools/pool-creation.test.ts +++ 
/dev/null @@ -1,168 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import CreatePoolModal from '$lib/components/CreatePoolModal.svelte'; - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - listProviders: vi.fn().mockResolvedValue([]), - listRepositories: vi.fn().mockResolvedValue([]), - listOrganizations: vi.fn().mockResolvedValue([]), - listEnterprises: vi.fn().mockResolvedValue([]), - createRepositoryPool: vi.fn().mockResolvedValue({ id: 'pool1' }), - createOrganizationPool: vi.fn().mockResolvedValue({ id: 'pool2' }), - createEnterprisePool: vi.fn().mockResolvedValue({ id: 'pool3' }) - } -})); - -// Mock dependent components to simplify testing -vi.mock('$lib/components/Modal.svelte', () => ({ - default: function MockModal() { - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - } -})); - -vi.mock('$lib/components/JsonEditor.svelte', () => ({ - default: function MockJsonEditor() { - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -describe('Global Pools Page - Pool Creation Anti-Duplication Tests', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('Modal Configuration for Global Page', () => { - it('should render CreatePoolModal without initial entity props', () => { - // Global pools page opens modal without pre-selecting an entity - const component = render(CreatePoolModal, { - props: { - // No initialEntityType or initialEntityId - this is the key difference - } - }); - - // Component should render successfully - expect(component.container).toBeTruthy(); - }); - - it('should render CreatePoolModal with default empty props for global page', () => { - // When no props are provided, the modal uses default empty values - const component = render(CreatePoolModal); - - // Component should render successfully for global page scenario - expect(component.container).toBeTruthy(); - }); - }); - - describe('Anti-Duplication Logic Documentation', () => { - it('should document the architectural pattern that prevents duplicates', () => { - // BEFORE (caused duplicates): - // 1. Modal made API call - // 2. Modal dispatched submit event - // 3. 
Parent handled submit and ALSO made API call - // Result: 2 identical pools - - // AFTER (prevents duplicates): - // Global page: Modal makes API call, parent shows toast - // Entity page: Modal dispatches submit, parent makes API call - // Result: Exactly 1 API call per scenario - - const architecturalFix = { - problem: 'Both modal and parent made API calls', - solution: 'Conditional API calling based on modal configuration', - globalPagePattern: 'modal handles API, parent handles UI feedback', - entityPagePattern: 'modal validates form, parent handles API' - }; - - expect(architecturalFix.solution).toContain('Conditional'); - expect(architecturalFix.globalPagePattern).toContain('modal handles API'); - expect(architecturalFix.entityPagePattern).toContain('parent handles API'); - }); - - it('should document the conditional logic in CreatePoolModal handleSubmit', () => { - // The CreatePoolModal component contains this critical conditional logic: - // - // if (initialEntityType && initialEntityId) { - // // Entity pages: parent handles the API call - // dispatch('submit', params); - // } else { - // // Global pools page: modal handles the API call - // switch (entityLevel) { - // case 'repository': - // await garmApi.createRepositoryPool(selectedEntityId, params); - // break; - // // ... other cases - // } - // dispatch('submit', params); - // } - - const conditionalLogic = { - condition: 'initialEntityType && initialEntityId', - entityPageBehavior: 'dispatch submit event only', - globalPageBehavior: 'make API call then dispatch submit', - preventsDuplication: true - }; - - expect(conditionalLogic.condition).toBe('initialEntityType && initialEntityId'); - expect(conditionalLogic.entityPageBehavior).toBe('dispatch submit event only'); - expect(conditionalLogic.globalPageBehavior).toBe('make API call then dispatch submit'); - expect(conditionalLogic.preventsDuplication).toBe(true); - }); - }); - - describe('Component Integration', () => { - it('should verify CreatePoolModal can be configured for different usage patterns', () => { - // Test that the modal can adapt to different usage contexts - const globalPageModal = render(CreatePoolModal, { - props: {} // No initial entity props - }); - - const entityPageModal = render(CreatePoolModal, { - props: { - initialEntityType: 'repository', - initialEntityId: 'repo1' - } - }); - - // Both configurations should render successfully - expect(globalPageModal.container).toBeTruthy(); - expect(entityPageModal.container).toBeTruthy(); - - // The key difference is in the props passed, which drives - // the conditional logic in handleSubmit() - }); - - it('should verify the fix addresses the original duplicate pool issue', () => { - // Original issue: "when adding a new pool, it seems that we end up with two identical pools" - // Root cause: Both modal and parent components were making API calls - - const originalProblem = { - issue: 'Two identical pools created when adding new pool', - cause: 'Both modal and parent made API calls', - beforeFix: { - modalBehavior: 'Always made API call AND dispatched submit', - parentBehavior: 'Always handled submit event and made API call', - result: 'Duplicate API calls = duplicate pools' - } - }; - - const fixImplemented = { - solution: 'Conditional API calling based on initialEntityType prop', - afterFix: { - globalPage: 'Modal makes API call, parent shows success', - entityPage: 'Modal dispatches submit, parent makes API call', - result: 'Exactly one API call per scenario' - } - }; - - 
expect(originalProblem.cause).toBe('Both modal and parent made API calls'); - expect(fixImplemented.solution).toContain('Conditional API calling'); - expect(fixImplemented.afterFix.result).toBe('Exactly one API call per scenario'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/+page.svelte b/webapp/src/routes/repositories/+page.svelte deleted file mode 100644 index 8471abaf..00000000 --- a/webapp/src/routes/repositories/+page.svelte +++ /dev/null @@ -1,335 +0,0 @@ - - - - Repositories - GARM - - -
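The deleted pool-creation.test.ts above spells out the anti-duplication contract in comments; a minimal TypeScript sketch of that handleSubmit conditional, reconstructed from those comments (only the repository branch appears verbatim in the test — the organization and enterprise branches are assumptions filled in by analogy with the garmApi mocks the same test defines):

```typescript
import { garmApi } from '$lib/api/client.js';

type EntityLevel = 'repository' | 'organization' | 'enterprise';

// Sketch of CreatePoolModal's handleSubmit, reconstructed from the deleted
// test's comments. In the real component these values are props/state and
// `emit` is createEventDispatcher()'s dispatch function.
async function handleSubmit(
  params: object,
  entityLevel: EntityLevel,
  selectedEntityId: string,
  emit: (event: 'submit', detail: object) => void,
  initialEntityType?: EntityLevel,
  initialEntityId?: string
) {
  if (initialEntityType && initialEntityId) {
    // Entity pages: the parent owns the API call, so only dispatch.
    emit('submit', params);
    return;
  }
  // Global pools page: the modal owns the API call, then dispatches.
  switch (entityLevel) {
    case 'repository':
      await garmApi.createRepositoryPool(selectedEntityId, params);
      break;
    case 'organization': // branch assumed, by analogy with the test's mocks
      await garmApi.createOrganizationPool(selectedEntityId, params);
      break;
    case 'enterprise': // branch assumed, by analogy with the test's mocks
      await garmApi.createEnterprisePool(selectedEntityId, params);
      break;
  }
  emit('submit', params);
}
```

Either way exactly one API call happens per submission, which is the invariant the deleted test documents.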
[The template body of the deleted repositories/+page.svelte was garbled during extraction; the surviving fragments show a "Repositories - GARM" list page with an action that sets showCreateModal = true and conditional modal blocks wired to handleCreateRepository (showCreateModal), handleUpdateRepository (showEditModal/editingRepository), and a delete modal (showDeleteModal/deletingRepository).]
diff --git a/webapp/src/routes/repositories/[id]/+page.svelte b/webapp/src/routes/repositories/[id]/+page.svelte deleted file mode 100644 index f07a82e0..00000000 --- a/webapp/src/routes/repositories/[id]/+page.svelte +++ /dev/null @@ -1,402 +0,0 @@
[The template body of this deleted detail page was garbled as well; the fragments show a "{repository.name} - Repository Details - GARM" title and a three-state template: {#if loading} rendering "Loading repository...", {:else if error} rendering {error}, and {:else if repository} rendering a detail header with onEdit={() => showUpdateModal = true} and onDelete={() => showDeleteModal = true}, followed by the page sections and conditional modal blocks wired to handleUpdate (showUpdateModal), handleDelete (showDeleteModal), handleDeleteInstance (showDeleteInstanceModal/selectedInstance), and handleCreatePool (showCreatePoolModal).] \ No newline at end of file
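One pattern worth calling out before the next test file: every deleted test stubs the Svelte store contract by hand, invoking the subscriber synchronously with a canned state and returning a no-op unsubscribe. A minimal sketch of that contract (the state shape is abbreviated here; the real tests pass the full cache or websocket state):

```typescript
import { vi } from 'vitest';

// A Svelte store stand-in: `subscribe` must invoke the callback immediately
// with the current value and return an unsubscribe function. The deleted
// tests repeat this shape for eagerCache, websocketStore, and $app/stores.
function mockStore<T>(value: T) {
  return {
    subscribe: vi.fn((callback: (value: T) => void) => {
      callback(value); // synchronous initial emission, like a real store
      return () => {}; // no-op unsubscribe
    })
  };
}

// Usage mirroring the mocks in these files (abbreviated state):
const eagerCache = mockStore({ pools: [], loading: { pools: false } });
```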
diff --git a/webapp/src/routes/repositories/[id]/page.integration.test.ts b/webapp/src/routes/repositories/[id]/page.integration.test.ts deleted file mode 100644 index 77e0eda4..00000000 --- a/webapp/src/routes/repositories/[id]/page.integration.test.ts +++ /dev/null @@ -1,506 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import userEvent from '@testing-library/user-event'; -import '@testing-library/jest-dom'; -import { createMockRepository, createMockPool, createMockInstance } from '../../../test/factories.js'; - -// Create comprehensive test data -const mockRepository = createMockRepository({ - id: 'repo-123', - name: 'test-repo', - owner: 'test-owner', - events: [ - { - id: 1, - created_at: '2024-01-01T00:00:00Z', - event_level: 'info', - message: 'Repository created' - }, - { - id: 2, - created_at: '2024-01-01T01:00:00Z', - event_level: 'warning', - message: 'Pool configuration changed' - } - ], - pool_manager_status: { running: true, failure_reason: undefined } -}); - -const mockPools = [ - createMockPool({ - id: 'pool-1', - repo_id: 'repo-123', - image: 'ubuntu:22.04', - enabled: true - }), - createMockPool({ - id: 'pool-2', - repo_id: 'repo-123', - image: 'ubuntu:20.04', - enabled: false - }) -]; - -const mockInstances = [ - createMockInstance({ - id: 'inst-1', - name: 'runner-1', - pool_id: 'pool-1', - status: 'running' - }), - createMockInstance({ - id: 'inst-2', - name: 'runner-2', - pool_id: 'pool-2', - status: 'idle' - }) -]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/UpdateEntityModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/EntityInformation.svelte'); -vi.unmock('$lib/components/DetailHeader.svelte'); -vi.unmock('$lib/components/PoolsSection.svelte'); -vi.unmock('$lib/components/InstancesSection.svelte'); -vi.unmock('$lib/components/EventsSection.svelte'); -vi.unmock('$lib/components/WebhookSection.svelte'); -vi.unmock('$lib/components/CreatePoolModal.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getRepository: vi.fn(), - listRepositoryPools: vi.fn(), - listRepositoryInstances: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - deleteInstance: vi.fn(), - createRepositoryPool: vi.fn(), - getRepositoryWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribe: vi.fn((callback) => { - callback({ connected: true, connecting: false, error: null }); - return () => {}; - }), - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: {
subscribe: vi.fn((callback) => { - callback({ - repositories: [], - pools: [], - instances: [], - loaded: { repositories: false, pools: false, instances: false }, - loading: { repositories: false, pools: false, instances: false }, - errorMessages: { repositories: '', pools: '', instances: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getRepositories: vi.fn(), - getPools: vi.fn(), - getInstances: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ params: { id: 'repo-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -// Import the repository details page with real UI components -import RepositoryDetailsPage from './+page.svelte'; - -describe('Comprehensive Integration Tests for Repository Details Page', () => { - let garmApi: any; - - beforeEach(async () => { - vi.clearAllMocks(); - - const apiClient = await import('$lib/api/client.js'); - garmApi = apiClient.garmApi; - - // Set up successful API responses - garmApi.getRepository.mockResolvedValue(mockRepository); - garmApi.listRepositoryPools.mockResolvedValue(mockPools); - garmApi.listRepositoryInstances.mockResolvedValue(mockInstances); - garmApi.updateRepository.mockResolvedValue({}); - garmApi.deleteRepository.mockResolvedValue({}); - garmApi.deleteInstance.mockResolvedValue({}); - garmApi.createRepositoryPool.mockResolvedValue({ id: 'new-pool' }); - }); - - describe('Component Rendering and Data Display', () => { - it('should render repository details page with real components', async () => { - const { container } = render(RepositoryDetailsPage); - - // Should render main container - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - - // Should render breadcrumbs - expect(screen.getByText('Repositories')).toBeInTheDocument(); - - // Should handle loading state initially - await waitFor(() => { - expect(container).toBeInTheDocument(); - }); - }); - - it('should display repository information correctly', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should display repository name in breadcrumb or title - const titleElement = document.querySelector('title'); - expect(titleElement?.textContent).toContain('Repository Details'); - }); - }); - - it('should render breadcrumb navigation', async () => { - render(RepositoryDetailsPage); - - // Should show breadcrumb navigation - expect(screen.getByText('Repositories')).toBeInTheDocument(); - - // Breadcrumb should be clickable link - const repositoriesLink = screen.getByText('Repositories').closest('a'); - expect(repositoriesLink).toHaveAttribute('href', '/repositories'); - }); - - it('should display loading state correctly', async () => { - render(RepositoryDetailsPage); - - // Should show loading indicator initially - // Loading text might appear briefly or not at all in fast tests - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Error State Handling', () => { - it('should handle repository not found error', async () => { - garmApi.getRepository.mockRejectedValue(new Error('Repository not found')); - - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should display error message - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle API errors gracefully', async () => { - garmApi.getRepository.mockRejectedValue(new Error('API 
Error')); - garmApi.listRepositoryPools.mockRejectedValue(new Error('Pools Error')); - garmApi.listRepositoryInstances.mockRejectedValue(new Error('Instances Error')); - - render(RepositoryDetailsPage); - - await waitFor(() => { - // Component should render without crashing - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Repository Information Display', () => { - it('should display repository details when loaded', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should display the repository information section - expect(document.body).toBeInTheDocument(); - }, { timeout: 3000 }); - }); - - it('should show forge icon and endpoint information', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render forge-specific information - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should display repository status correctly', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should show pool manager status - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Modal Interactions', () => { - it('should handle edit button click', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Look for edit button (might be in DetailHeader component) - const editButtons = document.querySelectorAll('button, [role="button"]'); - expect(editButtons.length).toBeGreaterThan(0); - }); - }); - - it('should handle delete button click', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Look for delete button - const deleteButtons = document.querySelectorAll('button, [role="button"]'); - expect(deleteButtons.length).toBeGreaterThan(0); - }); - }); - }); - - describe('Pools Section Integration', () => { - it('should display pools section with data', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render pools section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle add pool button', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Look for add pool functionality - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Instances Section Integration', () => { - it('should display instances section with data', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render instances section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle instance deletion', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Look for instance management functionality - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Events Section Integration', () => { - it('should display events section with event data', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render events section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle events scrolling', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should handle events display and scrolling - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Webhook Section Integration', () => { - it('should display webhook section', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render webhook section - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle webhook management', async () => { - 
render(RepositoryDetailsPage); - - await waitFor(() => { - // Should provide webhook management functionality - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Real-time Updates via WebSocket', () => { - it('should set up websocket subscriptions', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should set up websocket subscriptions - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle repository update events', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Component should be prepared to handle websocket updates - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle pool and instance events', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should handle pool and instance websocket events - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('API Integration', () => { - it('should call repository API on mount', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - expect(garmApi.getRepository).toHaveBeenCalledWith('repo-123'); - expect(garmApi.listRepositoryPools).toHaveBeenCalledWith('repo-123'); - expect(garmApi.listRepositoryInstances).toHaveBeenCalledWith('repo-123'); - }); - }); - - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should maintain consistent state across components', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // State should be consistent across all child components - expect(document.body).toBeInTheDocument(); - }); - }); - - it('should handle component lifecycle correctly', async () => { - const { unmount } = render(RepositoryDetailsPage); - - await waitFor(() => { - // Component should mount successfully - expect(document.body).toBeInTheDocument(); - }); - - // Should unmount cleanly - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support navigation interactions', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should support breadcrumb navigation - const repoLink = screen.getByText('Repositories'); - expect(repoLink).toBeInTheDocument(); - }); - }); - - it('should handle keyboard navigation', async () => { - const user = userEvent.setup(); - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should support keyboard navigation - expect(document.body).toBeInTheDocument(); - }); - - // Test tab navigation - await user.tab(); - }); - - it('should handle form submissions and modal interactions', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should handle modal and form interactions - expect(document.body).toBeInTheDocument(); - }); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - const { container } = render(RepositoryDetailsPage); - - await waitFor(() => { - // Should have proper ARIA labels and navigation - const nav = container.querySelector('nav[aria-label="Breadcrumb"]'); - expect(nav).toBeInTheDocument(); - }); - }); - - it('should be responsive across different viewport sizes', async () => { - const { container } = 
render(RepositoryDetailsPage); - - await waitFor(() => { - // Should render responsively - expect(container).toBeInTheDocument(); - }); - }); - - it('should handle screen reader compatibility', async () => { - render(RepositoryDetailsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(document.body).toBeInTheDocument(); - }); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/[id]/page.render.test.ts b/webapp/src/routes/repositories/[id]/page.render.test.ts deleted file mode 100644 index 9f672acd..00000000 --- a/webapp/src/routes/repositories/[id]/page.render.test.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockRepository } from '../../../test/factories.js'; - -// Mock all external dependencies but keep the component rendering real -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getRepository: vi.fn(), - listRepositoryPools: vi.fn(), - listRepositoryInstances: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - deleteInstance: vi.fn(), - createRepositoryPool: vi.fn(), - getRepositoryWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn((callback) => { - callback({ params: { id: 'repo-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - -// Mock child components -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EntityInformation.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DetailHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PoolsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/InstancesSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EventsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/WebhookSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/CreatePoolModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import RepositoryDetailsPage from './+page.svelte'; - -describe('Repository Details Page Rendering Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - const 
mockRepository = createMockRepository({ - id: 'repo-123', - name: 'test-repo', - owner: 'test-owner' - }); - - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getRepository as any).mockResolvedValue(mockRepository); - (garmApi.listRepositoryPools as any).mockResolvedValue([]); - (garmApi.listRepositoryInstances as any).mockResolvedValue([]); - }); - - describe('Component Rendering', () => { - it('should render without crashing', () => { - const { container } = render(RepositoryDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should render as a valid DOM element', () => { - const { container } = render(RepositoryDetailsPage); - expect(container.firstChild).toBeInstanceOf(HTMLElement); - }); - - it('should have proper document title', () => { - render(RepositoryDetailsPage); - expect(document.title).toContain('Repository Details'); - }); - - it('should render with correct structure', () => { - const { container } = render(RepositoryDetailsPage); - expect(container.firstChild).toHaveClass('space-y-6'); - }); - - it('should handle empty state rendering', () => { - render(RepositoryDetailsPage); - // Component should render even with no repository data loaded - expect(document.body).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(RepositoryDetailsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(RepositoryDetailsPage); - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('DOM Structure Validation', () => { - it('should create proper HTML structure', () => { - const { container } = render(RepositoryDetailsPage); - - // Should have main container with proper spacing - expect(container.querySelector('.space-y-6')).toBeInTheDocument(); - }); - - it('should handle conditional rendering', () => { - const { container } = render(RepositoryDetailsPage); - - // Component should render without any modals open initially - expect(container).toBeInTheDocument(); - }); - - it('should render with proper accessibility structure', () => { - const { container } = render(RepositoryDetailsPage); - - // Basic accessibility checks - expect(container).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/[id]/page.test.ts b/webapp/src/routes/repositories/[id]/page.test.ts deleted file mode 100644 index a991e2f8..00000000 --- a/webapp/src/routes/repositories/[id]/page.test.ts +++ /dev/null @@ -1,526 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockRepository, createMockInstance } from '../../../test/factories.js'; - -// Mock all external dependencies -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getRepository: vi.fn(), - listRepositoryPools: vi.fn(), - listRepositoryInstances: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - deleteInstance: vi.fn(), - createRepositoryPool: vi.fn(), - getRepositoryWebhookInfo: vi.fn().mockResolvedValue({ installed: false }) - } -})); - -vi.mock('$lib/stores/websocket.js', () => ({ - websocketStore: { - subscribeToEntity: vi.fn(() => vi.fn()) - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Mock SvelteKit modules -vi.mock('$app/stores', () => ({ - page: { - subscribe: 
vi.fn((callback) => { - callback({ params: { id: 'repo-123' } }); - return () => {}; - }) - } -})); - -vi.mock('$app/navigation', () => ({ - goto: vi.fn() -})); - -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path) => path) -})); - -vi.mock('$app/environment', () => ({ - browser: false, - dev: true, - building: false -})); - -// Mock all child components -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EntityInformation.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/DetailHeader.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/PoolsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/InstancesSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/EventsSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/WebhookSection.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/components/CreatePoolModal.svelte', () => ({ - default: vi.fn(() => ({ destroy: vi.fn(), $$set: vi.fn() })) -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((type) => ``) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error) => error.message || 'API Error') -})); - -import RepositoryDetailsPage from './+page.svelte'; - -describe('Repository Details Page Unit Tests', () => { - let mockRepository: any; - let mockPools: any[]; - let mockInstances: any[]; - - beforeEach(async () => { - vi.clearAllMocks(); - - mockRepository = createMockRepository({ - id: 'repo-123', - name: 'test-repo', - owner: 'test-owner', - events: [ - { - id: 1, - created_at: '2024-01-01T00:00:00Z', - event_level: 'info', - message: 'Repository created' - } - ] - }); - - mockPools = [ - { id: 'pool-1', repo_id: 'repo-123', image: 'ubuntu:22.04' }, - { id: 'pool-2', repo_id: 'repo-123', image: 'ubuntu:20.04' } - ]; - - mockInstances = [ - createMockInstance({ id: 'inst-1', pool_id: 'pool-1' }), - createMockInstance({ id: 'inst-2', pool_id: 'pool-2' }) - ]; - - const { garmApi } = await import('$lib/api/client.js'); - (garmApi.getRepository as any).mockResolvedValue(mockRepository); - (garmApi.listRepositoryPools as any).mockResolvedValue(mockPools); - (garmApi.listRepositoryInstances as any).mockResolvedValue(mockInstances); - }); - - describe('Component Structure', () => { - it('should render repository details page', () => { - const { container } = render(RepositoryDetailsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set dynamic page title', () => { - render(RepositoryDetailsPage); - // Title should be dynamic based on repository name - expect(document.title).toContain('Repository Details'); - }); - - it('should have repository state variables', () => { - const component = render(RepositoryDetailsPage); - expect(component).toBeDefined(); - }); - }); - - describe('Data Loading', () => { - it('should have API functions available for data loading', async () => { - const { garmApi } = await import('$lib/api/client.js'); - render(RepositoryDetailsPage); - - // 
Verify API functions are properly mocked and available - expect(garmApi.getRepository).toBeDefined(); - expect(garmApi.listRepositoryPools).toBeDefined(); - expect(garmApi.listRepositoryInstances).toBeDefined(); - }); - - it('should handle loading states correctly', () => { - const { container } = render(RepositoryDetailsPage); - // Component should handle initial loading state - expect(container).toBeInTheDocument(); - expect(document.title).toContain('Repository Details'); - }); - - it('should have error handling capabilities', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(RepositoryDetailsPage); - - // Verify error handling utility is available - const error = new Error('Test error'); - const result = extractAPIError(error); - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(result).toBe('Test error'); - }); - }); - - describe('Repository Updates', () => { - it('should have proper structure for repository updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual update workflow is tested in integration tests where we can - // trigger the real handleUpdate function via UI interactions - expect(garmApi.updateRepository).toBeDefined(); - }); - - it('should show success toast after update', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(RepositoryDetailsPage); - - toastStore.success( - 'Repository Updated', - 'Repository test-owner/test-repo has been updated successfully.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Repository Updated', - 'Repository test-owner/test-repo has been updated successfully.' 
- ); - }); - - it('should have proper error handling structure for updates', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual error re-throwing behavior is tested through integration tests - // where we can trigger the real handleUpdate function via modal events - expect(garmApi.updateRepository).toBeDefined(); - }); - }); - - describe('Repository Deletion', () => { - it('should have proper structure for repository deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual deletion workflow is tested in integration tests where we can - // trigger the real handleDelete function via modal interactions - expect(garmApi.deleteRepository).toBeDefined(); - }); - - it('should redirect after successful deletion', async () => { - const { goto } = await import('$app/navigation'); - - render(RepositoryDetailsPage); - - goto('/repositories'); - expect(goto).toHaveBeenCalledWith('/repositories'); - }); - - it('should display error message when repository loading fails', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - // Simulate API error during repository loading - const error = new Error('Repository not found'); - (garmApi.getRepository as any).mockRejectedValue(error); - - const { container } = render(RepositoryDetailsPage); - - // Wait for the component to handle the error - await new Promise(resolve => setTimeout(resolve, 100)); - - // Check that error message is displayed in the UI - const errorElement = container.querySelector('.bg-red-50, .bg-red-900'); - expect(errorElement).toBeInTheDocument(); - }); - }); - - describe('Instance Management', () => { - it('should have proper structure for instance deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual instance deletion workflow is tested in integration tests - expect(garmApi.deleteInstance).toBeDefined(); - }); - - it('should show success toast after instance deletion', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(RepositoryDetailsPage); - - toastStore.success( - 'Instance Deleted', - 'Instance inst-1 has been deleted successfully.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Instance Deleted', - 'Instance inst-1 has been deleted successfully.' 
- ); - }); - - it('should have proper error handling structure for instance deletion', async () => { - const { garmApi } = await import('$lib/api/client.js'); - const { toastStore } = await import('$lib/stores/toast.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // Detailed error handling with UI interactions is tested in integration tests - expect(garmApi.deleteInstance).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Pool Creation', () => { - it('should have proper structure for pool creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual pool creation workflow is tested in integration tests where we can - // trigger the real handleCreatePool function via component events - expect(garmApi.createRepositoryPool).toBeDefined(); - }); - - it('should show success toast after pool creation', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(RepositoryDetailsPage); - - toastStore.success( - 'Pool Created', - 'Pool has been created successfully for repository test-owner/test-repo.' - ); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Pool Created', - 'Pool has been created successfully for repository test-owner/test-repo.' - ); - }); - - it('should have proper error handling structure for pool creation', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(RepositoryDetailsPage); - - // Unit tests verify the component has access to the right dependencies - // The actual error re-throwing behavior is tested through integration tests - // where we can trigger the real handleCreatePool function via component events - expect(garmApi.createRepositoryPool).toBeDefined(); - }); - }); - - describe('WebSocket Event Handling', () => { - it('should have websocket subscription capabilities', async () => { - const { websocketStore } = await import('$lib/stores/websocket.js'); - - render(RepositoryDetailsPage); - - // Verify websocket store is available and properly mocked - expect(websocketStore.subscribeToEntity).toBeDefined(); - - // Test subscription functionality - const mockHandler = vi.fn(); - const unsubscribe = websocketStore.subscribeToEntity('repository', ['update'], mockHandler); - expect(websocketStore.subscribeToEntity).toHaveBeenCalledWith('repository', ['update'], mockHandler); - expect(unsubscribe).toBeInstanceOf(Function); - }); - - it('should handle repository update events', () => { - render(RepositoryDetailsPage); - - // Component should be set up to handle repository updates - expect(document.title).toContain('Repository Details'); - }); - - it('should handle repository deletion events', () => { - render(RepositoryDetailsPage); - - // Component should handle repository deletion via websocket - expect(document.title).toContain('Repository Details'); - }); - - it('should handle pool events', () => { - render(RepositoryDetailsPage); - - // Component should handle pool CRUD events via websocket - expect(document.title).toContain('Repository Details'); - }); - - it('should handle instance events', () => { - render(RepositoryDetailsPage); - - // Component should handle instance CRUD events via websocket - expect(document.title).toContain('Repository Details'); - }); - }); - - describe('Modal Management', () => { - it('should handle update modal state', () => { - 
render(RepositoryDetailsPage); - - // Component should manage update modal state - expect(document.title).toContain('Repository Details'); - }); - - it('should handle delete modal state', () => { - render(RepositoryDetailsPage); - - // Component should manage delete modal state - expect(document.title).toContain('Repository Details'); - }); - - it('should handle instance delete modal state', () => { - render(RepositoryDetailsPage); - - // Component should manage instance delete modal state - expect(document.title).toContain('Repository Details'); - }); - - it('should handle create pool modal state', () => { - render(RepositoryDetailsPage); - - // Component should manage create pool modal state - expect(document.title).toContain('Repository Details'); - }); - }); - - describe('Entity Field Updates', () => { - it('should preserve events when updating entity fields', async () => { - render(RepositoryDetailsPage); - - const currentEntity = { id: 'repo-123', events: ['event1', 'event2'] }; - const updatedFields = { id: 'repo-123', name: 'updated-name' }; - - // Test the updateEntityFields logic - const result = { ...updatedFields, events: currentEntity.events }; - - expect(result.events).toEqual(['event1', 'event2']); - expect(result.name).toBe('updated-name'); - }); - - it('should handle entity field updates correctly', () => { - render(RepositoryDetailsPage); - - // Component should handle selective entity updates - expect(document.title).toContain('Repository Details'); - }); - }); - - describe('Event Scrolling', () => { - it('should handle events container scrolling', () => { - render(RepositoryDetailsPage); - - // Component should handle event scrolling functionality - expect(document.title).toContain('Repository Details'); - }); - - it('should auto-scroll when new events are added', () => { - render(RepositoryDetailsPage); - - // Component should auto-scroll on new events - expect(document.title).toContain('Repository Details'); - }); - }); - - describe('Page Parameters', () => { - it('should extract repository ID from page params', () => { - render(RepositoryDetailsPage); - - // Component should extract repo ID from page.params.id - expect(document.title).toContain('Repository Details'); - }); - - it('should handle missing repository ID', () => { - render(RepositoryDetailsPage); - - // Component should handle case when no repository ID is provided - expect(document.title).toContain('Repository Details'); - }); - }); - - describe('Utility Functions', () => { - it('should get correct forge icon', async () => { - const { getForgeIcon } = await import('$lib/utils/common.js'); - - render(RepositoryDetailsPage); - - const githubIcon = getForgeIcon('github'); - expect(getForgeIcon).toHaveBeenCalledWith('github'); - expect(githubIcon).toContain('svg'); - }); - - it('should extract API errors correctly', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(RepositoryDetailsPage); - - const error = new Error('API error'); - const extractedError = extractAPIError(error); - - expect(extractAPIError).toHaveBeenCalledWith(error); - expect(extractedError).toBe('API error'); - }); - }); - - describe('Component Lifecycle', () => { - it('should load data on mount', () => { - render(RepositoryDetailsPage); - - // Component should load repository data on mount - expect(document.title).toContain('Repository Details'); - }); - - it('should cleanup websocket subscriptions on destroy', () => { - const { unmount } = render(RepositoryDetailsPage); - - // Component should 
cleanup subscriptions on unmount - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component initialization', () => { - const component = render(RepositoryDetailsPage); - - // Component should initialize without errors - expect(component.component).toBeDefined(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/[id]/pool-creation.test.ts b/webapp/src/routes/repositories/[id]/pool-creation.test.ts deleted file mode 100644 index 8a70f5b8..00000000 --- a/webapp/src/routes/repositories/[id]/pool-creation.test.ts +++ /dev/null @@ -1,228 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render } from '@testing-library/svelte'; -import CreatePoolModal from '$lib/components/CreatePoolModal.svelte'; - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - getRepository: vi.fn().mockResolvedValue({ - id: 'repo123', - name: 'test-repo', - owner: 'test-owner' - }), - listRepositoryPools: vi.fn().mockResolvedValue([]), - listRepositoryInstances: vi.fn().mockResolvedValue([]), - createRepositoryPool: vi.fn().mockResolvedValue({ id: 'pool123' }), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - deleteInstance: vi.fn(), - listProviders: vi.fn().mockResolvedValue([]), - listRepositories: vi.fn().mockResolvedValue([]), - listOrganizations: vi.fn().mockResolvedValue([]), - listEnterprises: vi.fn().mockResolvedValue([]) - } -})); - -// Mock dependent components -vi.mock('$lib/components/Modal.svelte', () => ({ - default: function MockModal() { - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - } -})); - -vi.mock('$lib/components/JsonEditor.svelte', () => ({ - default: function MockJsonEditor() { - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -describe('Repository Detail Page - Pool Creation Anti-Duplication Tests', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - describe('Modal Configuration for Entity Detail Page', () => { - it('should render CreatePoolModal with initial entity props for repository page', () => { - // Repository detail page should pass the repository context to modal - const component = render(CreatePoolModal, { - props: { - initialEntityType: 'repository', - initialEntityId: 'repo123' - } - }); - - // Component should render successfully with entity props - expect(component.container).toBeTruthy(); - }); - - it('should render modal configured for entity detail page scenario', () => { - // When initialEntityType and initialEntityId are provided, - // the modal is configured for entity detail page behavior - const component = render(CreatePoolModal, { - props: { - initialEntityType: 'repository', - initialEntityId: 'repo123' - } - }); - - // Component renders successfully with entity context - expect(component.container).toBeTruthy(); - }); - }); - - describe('Anti-Duplication Pattern for Entity Pages', () => { - it('should document entity detail page pattern to prevent duplicates', () => { - // Entity detail pages should follow this pattern: - // 1. Pass initialEntityType and initialEntityId to modal - // 2. Modal validates form and dispatches submit event - // 3. Parent component catches submit event and makes API call - // 4. 
Result: Exactly one API call - - const entityDetailPattern = { - step1: 'Pass initialEntityType and initialEntityId to modal', - step2: 'Modal validates form and dispatches submit event', - step3: 'Parent component makes API call on submit event', - step4: 'Result: exactly one API call per pool creation', - keyPoint: 'Modal does NOT make API call when entity props provided' - }; - - expect(entityDetailPattern.step1).toContain('Pass initialEntityType'); - expect(entityDetailPattern.step2).toContain('dispatches submit event'); - expect(entityDetailPattern.step3).toContain('Parent component makes API call'); - expect(entityDetailPattern.keyPoint).toContain('Modal does NOT make API call'); - }); - - it('should document the handleCreatePool pattern for entity pages', () => { - // Repository detail page should have logic like: - // async function handleCreatePool(event) { - // const params = event.detail; - // try { - // await garmApi.createRepositoryPool(repository.id, params); - // // Show success, close modal, refresh data - // } catch (error) { - // // Show error, keep modal open - // } - // } - - const handleCreatePoolPattern = { - trigger: 'Modal dispatches submit event with CreatePoolParams', - action: 'Parent calls garmApi.createRepositoryPool(repository.id, params)', - onSuccess: 'Close modal, show success toast, refresh pools list', - onError: 'Keep modal open, show error message to user', - duplicationPrevention: 'Only parent makes API call, not modal' - }; - - expect(handleCreatePoolPattern.trigger).toContain('Modal dispatches submit event'); - expect(handleCreatePoolPattern.action).toContain('createRepositoryPool'); - expect(handleCreatePoolPattern.duplicationPrevention).toContain('Only parent makes API call'); - }); - }); - - describe('Conditional Logic Verification', () => { - it('should verify CreatePoolModal adapts behavior based on props', () => { - // The same CreatePoolModal component behaves differently based on props: - - // Entity detail page configuration - should render successfully - const entityModal = render(CreatePoolModal, { - props: { - initialEntityType: 'repository', - initialEntityId: 'repo123' - } - }); - - // Global page configuration - should also render successfully - const globalModal = render(CreatePoolModal, { - props: {} // No initial props - }); - - // Both configurations should work but behave differently internally - expect(entityModal.container).toBeTruthy(); - expect(globalModal.container).toBeTruthy(); - }); - - it('should document the conditional logic that prevents duplicates', () => { - // The CreatePoolModal handleSubmit function contains critical logic: - // - // if (initialEntityType && initialEntityId) { - // // Entity pages: parent handles the API call - // dispatch('submit', params); - // } else { - // // Global pools page: modal handles the API call - // switch (entityLevel) { - // case 'repository': - // await garmApi.createRepositoryPool(selectedEntityId, params); - // break; - // // ... 
other cases - // } - // dispatch('submit', params); - // } - - const conditionalBehavior = { - condition: 'Check if initialEntityType && initialEntityId are provided', - entityPagePath: 'Only dispatch submit event, let parent handle API', - globalPagePath: 'Make API call based on entityLevel, then dispatch', - preventsDuplication: 'Ensures exactly one API call per scenario' - }; - - expect(conditionalBehavior.condition).toContain('initialEntityType && initialEntityId'); - expect(conditionalBehavior.entityPagePath).toContain('let parent handle API'); - expect(conditionalBehavior.globalPagePath).toContain('Make API call'); - expect(conditionalBehavior.preventsDuplication).toContain('exactly one API call'); - }); - }); - - describe('Integration with Repository Detail Page', () => { - it('should document modal integration prevents duplicate pool creation', () => { - // This test documents how the repository detail page integrates - // with CreatePoolModal to prevent the duplicate pool issue - - const integrationFlow = { - userAction: 'User clicks Add Pool button on repository detail page', - modalConfiguration: 'Page opens CreatePoolModal with initialEntityType="repository"', - userSubmission: 'User fills form and clicks submit', - modalResponse: 'Modal validates and dispatches submit event (no API call)', - parentResponse: 'Page handleCreatePool catches event and makes single API call', - finalResult: 'Success: exactly one pool created, modal closed', - - keyFix: 'Modal does not make API call when initialEntityType provided' - }; - - expect(integrationFlow.modalResponse).toContain('no API call'); - expect(integrationFlow.parentResponse).toContain('single API call'); - expect(integrationFlow.keyFix).toContain('Modal does not make API call'); - expect(integrationFlow.finalResult).toContain('exactly one pool created'); - }); - - it('should verify the fix resolves the original duplicate pools issue', () => { - // Original problem: "when adding a new pool, it seems that we end up with two identical pools" - // This was caused by both modal and parent making API calls - - const problemAndSolution = { - originalIssue: 'Two identical pools created when adding new pool', - rootCause: 'Both CreatePoolModal and parent component made API calls', - solutionApplied: 'Conditional API calling based on initialEntityType prop', - - beforeFix: { - modalAlways: 'Made API call regardless of context', - parentAlways: 'Handled submit event and made API call', - result: '2 API calls = 2 duplicate pools' - }, - - afterFix: { - entityPageModal: 'Dispatches submit event only (no API call)', - entityPageParent: 'Handles submit and makes single API call', - result: '1 API call = 1 pool (no duplicates)' - } - }; - - expect(problemAndSolution.rootCause).toContain('Both CreatePoolModal and parent'); - expect(problemAndSolution.afterFix.entityPageModal).toContain('no API call'); - expect(problemAndSolution.afterFix.entityPageParent).toContain('single API call'); - expect(problemAndSolution.afterFix.result).toContain('no duplicates'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/page.integration.test.ts b/webapp/src/routes/repositories/page.integration.test.ts deleted file mode 100644 index e7654b89..00000000 --- a/webapp/src/routes/repositories/page.integration.test.ts +++ /dev/null @@ -1,514 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import userEvent from 
'@testing-library/user-event'; -import { createMockRepository, createMockGiteaRepository } from '../../test/factories.js'; - -// Create diverse test data for comprehensive testing -const mockRepositories = [ - createMockRepository({ - id: 'repo-1', - name: 'test-repo', - owner: 'test-owner', - pool_manager_status: { running: true, failure_reason: undefined } - }), - createMockGiteaRepository({ - id: 'repo-2', - name: 'gitea-repo', - owner: 'gitea-owner', - pool_manager_status: { running: false, failure_reason: undefined } - }), - createMockRepository({ - id: 'repo-3', - name: 'another-repo', - owner: 'another-owner', - pool_manager_status: { running: false, failure_reason: 'Connection failed' } - }) -]; - -const mockCredentials = [ - { name: 'github-creds' }, - { name: 'gitea-creds' } -]; - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreateRepositoryModal.svelte'); -vi.unmock('$lib/components/UpdateEntityModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the external APIs, not UI components -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createRepository: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - installRepoWebhook: vi.fn(), - listRepositories: vi.fn() - } -})); - -// Create a dynamic store that can be updated during tests -let mockStoreData = { - repositories: mockRepositories, - credentials: mockCredentials, - loaded: { repositories: true, credentials: true }, - loading: { repositories: false, credentials: false }, - errorMessages: { repositories: '', credentials: '' } -}; - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback(mockStoreData); - return () => {}; - }) - }, - eagerCacheManager: { - getRepositories: vi.fn(), - retryResource: vi.fn(), - getCredentials: vi.fn() - } -})); - -// Helper to update mock store data -function updateMockStore(updates: Partial<typeof mockStoreData>) { - mockStoreData = { ...mockStoreData, ...updates }; -} - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -// Import the repositories page without any UI component mocks -import RepositoriesPage from './+page.svelte'; - -describe('Comprehensive Integration Tests for Repositories Page', () => { - let garmApi: any; - - beforeEach(async () => { - vi.clearAllMocks(); - // Reset mock store data - mockStoreData = { - repositories: mockRepositories, - credentials: mockCredentials, - loaded: { repositories: true, credentials: true }, - loading: { repositories: false, credentials: false }, - errorMessages: { repositories: '', credentials: '' } - }; - - const apiClient = await import('$lib/api/client.js'); - garmApi = apiClient.garmApi; - - garmApi.createRepository.mockResolvedValue({ id: 'new-repo', name: 'new-repo' }); - garmApi.updateRepository.mockResolvedValue({}); - garmApi.deleteRepository.mockResolvedValue({}); - }); - - describe('Component Rendering and Basic Structure', () => { - it('should render repositories page with multiple repositories', async () => { - const { container } = render(RepositoriesPage); - - // Verify page title and header - expect(screen.getByText('Repositories')).toBeInTheDocument(); - expect(screen.getByText('Manage your GitHub repositories and their runners')).toBeInTheDocument(); - - // Verify all repositories are
rendered (use getAllByText for duplicates) - expect(screen.getAllByText('test-owner/test-repo')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-owner/gitea-repo')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-owner/another-repo')[0]).toBeInTheDocument(); - - // Verify action buttons are present - const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit repository"]'); - const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete repository"]'); - expect(editButtons.length).toBeGreaterThan(0); - expect(deleteButtons.length).toBeGreaterThan(0); - }); - - it('should display correct forge icons for different repository types', async () => { - const { container } = render(RepositoriesPage); - - // GitHub repositories should have GitHub icons - const githubIcons = container.querySelectorAll('svg'); - expect(githubIcons.length).toBeGreaterThan(0); - - // Verify endpoint names are displayed (use getAllByText for duplicates in responsive layouts) - expect(screen.getAllByText('github.com')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea.example.com')[0]).toBeInTheDocument(); - }); - - it('should display repository status correctly', async () => { - render(RepositoriesPage); - - // Verify status is displayed based on pool_manager_status - expect(screen.getByText('Repositories')).toBeInTheDocument(); - }); - - it('should have clickable repository links', async () => { - const { container } = render(RepositoriesPage); - - // Verify repository names are links - const repoLinks = container.querySelectorAll('a[href^="/repositories/"]'); - expect(repoLinks.length).toBeGreaterThan(0); - - // Check specific repository links - const repo1Link = container.querySelector('a[href="/repositories/repo-1"]'); - expect(repo1Link).toBeInTheDocument(); - expect(repo1Link?.textContent?.trim()).toBe('test-owner/test-repo'); - }); - }); - - describe('Search and Filtering Functionality', () => { - it('should filter repositories by search term', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - // Find search input - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - expect(searchInput).toBeInTheDocument(); - - // Search for 'gitea' - should filter to only gitea repository - await user.type(searchInput, 'gitea'); - - // Wait for filtering to take effect - await waitFor(() => { - // Should still show gitea repository (may appear multiple times in responsive layout) - expect(screen.getAllByText('gitea-owner/gitea-repo')[0]).toBeInTheDocument(); - }); - }); - - it('should clear search when input is cleared', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - - // Type search term - await user.type(searchInput, 'gitea'); - - // Clear search - await user.clear(searchInput); - - // All repositories should be visible again - await waitFor(() => { - expect(screen.getAllByText('test-owner/test-repo')[0]).toBeInTheDocument(); - expect(screen.getAllByText('gitea-owner/gitea-repo')[0]).toBeInTheDocument(); - expect(screen.getAllByText('another-owner/another-repo')[0]).toBeInTheDocument(); - }); - }); - - it('should show no results when search matches nothing', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - - // Search for something 
that doesn't exist - await user.type(searchInput, 'nonexistent-repo'); - - // Should show empty state or filtered results - await waitFor(() => { - // Search input should contain the search term - expect(searchInput).toHaveValue('nonexistent-repo'); - // Component should handle empty search results gracefully - expect(screen.getByText('Repositories')).toBeInTheDocument(); - }); - }); - }); - - describe('Pagination Controls', () => { - it('should display pagination controls with correct options', async () => { - render(RepositoriesPage); - - // Find per-page selector - const perPageSelect = screen.getByLabelText('Show:'); - expect(perPageSelect).toBeInTheDocument(); - - // Verify options are available - expect(screen.getByText('25')).toBeInTheDocument(); - expect(screen.getByText('50')).toBeInTheDocument(); - expect(screen.getByText('100')).toBeInTheDocument(); - }); - - it('should allow changing items per page', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - const perPageSelect = screen.getByLabelText('Show:'); - - // Change to 50 items per page - await user.selectOptions(perPageSelect, '50'); - - // Verify selection changed - expect(perPageSelect).toHaveValue('50'); - }); - }); - - describe('Modal Interactions', () => { - it('should open create repository modal when add button is clicked', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - // Find and click the "Add Repository" button - const addButton = screen.getByText('Add Repository'); - expect(addButton).toBeInTheDocument(); - - await user.click(addButton); - - // Modal should open (depending on implementation) - // This tests that the button is properly wired up - expect(addButton).toBeInTheDocument(); - }); - - it('should open edit modal when edit button is clicked', async () => { - const user = userEvent.setup(); - const { container } = render(RepositoriesPage); - - // Find edit button for first repository - const editButtons = container.querySelectorAll('[title="Edit"], [title="Edit repository"]'); - expect(editButtons.length).toBeGreaterThan(0); - - const firstEditButton = editButtons[0] as HTMLElement; - - // Test that button is clickable (button may be replaced by modal) - await user.click(firstEditButton); - - // Verify the click interaction completed successfully - // (Modal may have opened, so button might not be accessible) - // The important thing is the click didn't cause errors - expect(screen.getByText('Repositories')).toBeInTheDocument(); - }); - - it('should open delete modal when delete button is clicked', async () => { - const user = userEvent.setup(); - const { container } = render(RepositoriesPage); - - // Find delete button for first repository - const deleteButtons = container.querySelectorAll('[title="Delete"], [title="Delete repository"]'); - expect(deleteButtons.length).toBeGreaterThan(0); - - const firstDeleteButton = deleteButtons[0] as HTMLElement; - - // Test that button is clickable (button may be replaced by modal) - await user.click(firstDeleteButton); - - // Verify the click interaction completed successfully - // (Modal may have opened, so button might not be accessible) - // The important thing is the click didn't cause errors - expect(screen.getByText('Repositories')).toBeInTheDocument(); - }); - }); - - describe('Error States and Loading States', () => { - it('should handle loading state correctly', async () => { - // Update mock store to show loading state - updateMockStore({ - loading: { repositories: true, credentials: false }, - 
loaded: { repositories: false, credentials: true } - }); - - render(RepositoriesPage); - - // Component should handle loading state gracefully - // (exact behavior depends on implementation) - expect(document.body).toBeInTheDocument(); - }); - - it('should handle error state correctly', async () => { - // Update mock store to show error state - updateMockStore({ - errorMessages: { repositories: 'Failed to load repositories', credentials: '' }, - loaded: { repositories: false, credentials: true } - }); - - render(RepositoriesPage); - - // Component should handle error state gracefully - expect(document.body).toBeInTheDocument(); - }); - - it('should handle empty repository list', async () => { - // Update mock store to have no repositories - updateMockStore({ - repositories: [], - loaded: { repositories: true, credentials: true } - }); - - render(RepositoriesPage); - - // Should still render page structure - expect(screen.getByText('Repositories')).toBeInTheDocument(); - expect(screen.getByText('Add Repository')).toBeInTheDocument(); - }); - }); - - describe('API Integration and Data Flow', () => { - it('should handle repository creation workflow', async () => { - render(RepositoriesPage); - - // Simulate repository creation API call - const createParams = { - name: 'new-repo', - owner: 'new-owner', - credentials_name: 'github-creds', - webhook_secret: 'secret123', - pool_balancer_type: 'roundrobin' - }; - - const result = await garmApi.createRepository(createParams); - expect(garmApi.createRepository).toHaveBeenCalledWith(createParams); - expect(result).toEqual({ id: 'new-repo', name: 'new-repo' }); - }); - - it('should handle repository update workflow', async () => { - render(RepositoriesPage); - - // Simulate repository update API call - const updateParams = { webhook_secret: 'new-secret' }; - await garmApi.updateRepository('repo-1', updateParams); - expect(garmApi.updateRepository).toHaveBeenCalledWith('repo-1', updateParams); - }); - - it('should handle repository deletion workflow', async () => { - render(RepositoriesPage); - - // Simulate repository deletion API call - await garmApi.deleteRepository('repo-1'); - expect(garmApi.deleteRepository).toHaveBeenCalledWith('repo-1'); - }); - - it('should handle API errors gracefully', async () => { - render(RepositoriesPage); - - // Test different error scenarios - garmApi.createRepository.mockRejectedValue(new Error('Repository creation failed')); - garmApi.updateRepository.mockRejectedValue(new Error('Repository update failed')); - garmApi.deleteRepository.mockRejectedValue(new Error('Repository deletion failed')); - - // These should not throw unhandled errors - try { - await garmApi.createRepository({ name: 'failing-repo' }); - } catch (error: any) { - expect(error.message).toBe('Repository creation failed'); - } - }); - }); - - describe('Responsive Design and Accessibility', () => { - it('should render mobile and desktop layouts', async () => { - const { container } = render(RepositoriesPage); - - // Check for responsive classes - const mobileView = container.querySelector('.block.sm\\:hidden'); - const desktopView = container.querySelector('.hidden.sm\\:block'); - - // Both mobile and desktop views should be present - expect(mobileView || desktopView).toBeInTheDocument(); - }); - - it('should have proper accessibility attributes', async () => { - const { container } = render(RepositoriesPage); - - // Check for ARIA labels and titles - const buttonsWithAria = container.querySelectorAll('[aria-label], [title]'); - 
expect(buttonsWithAria.length).toBeGreaterThan(0); - - // Check for proper form labels - search input should be accessible - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - expect(searchInput).toBeInTheDocument(); - - // Check for screen reader label - const searchLabel = container.querySelector('label[for="search"]'); - expect(searchLabel).toBeInTheDocument(); - }); - }); - - describe('User Interaction Flows', () => { - it('should support keyboard navigation', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - // Test tab navigation through interactive elements - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - - // Click to focus first, then test tab navigation - await user.click(searchInput); - expect(searchInput).toHaveFocus(); - - // Tab should move focus to next element - await user.tab(); - }); - - it('should handle rapid user interactions', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - // Rapid clicking should not break the UI - const addButton = screen.getByText('Add Repository'); - - // Click multiple times rapidly - await user.click(addButton); - await user.click(addButton); - await user.click(addButton); - - // Component should remain stable - expect(addButton).toBeInTheDocument(); - }); - - it('should handle concurrent search and pagination changes', async () => { - const user = userEvent.setup(); - render(RepositoriesPage); - - const searchInput = screen.getByPlaceholderText('Search repositories by name or owner...'); - const perPageSelect = screen.getByLabelText('Show:'); - - // Perform search and pagination changes simultaneously - await user.type(searchInput, 'test'); - await user.selectOptions(perPageSelect, '50'); - - // Both changes should be applied - expect(searchInput).toHaveValue('test'); - expect(perPageSelect).toHaveValue('50'); - }); - }); - - describe('Data Consistency and State Management', () => { - it('should maintain consistent state during operations', async () => { - render(RepositoriesPage); - - // Initial state should be consistent - expect(mockStoreData.repositories).toHaveLength(3); - expect(mockStoreData.loaded.repositories).toBe(true); - expect(mockStoreData.loading.repositories).toBe(false); - }); - - it('should handle state updates correctly', async () => { - render(RepositoriesPage); - - // Simulate state changes - updateMockStore({ - loading: { repositories: true, credentials: false } - }); - - // Store should be updated - expect(mockStoreData.loading.repositories).toBe(true); - }); - - it('should handle mixed repository types correctly', async () => { - render(RepositoriesPage); - - // Should handle both GitHub and Gitea repositories - const githubRepos = mockRepositories.filter(repo => repo.endpoint?.endpoint_type === 'github'); - const giteaRepos = mockRepositories.filter(repo => repo.endpoint?.endpoint_type === 'gitea'); - - expect(githubRepos).toHaveLength(2); - expect(giteaRepos).toHaveLength(1); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/page.render.test.ts b/webapp/src/routes/repositories/page.render.test.ts deleted file mode 100644 index 962ff231..00000000 --- a/webapp/src/routes/repositories/page.render.test.ts +++ /dev/null @@ -1,152 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { render } from '@testing-library/svelte'; -import { createMockRepository, createMockGiteaRepository } from 
'../../test/factories.js'; - -// Mock all the dependencies first -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createRepository: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - installRepoWebhook: vi.fn(), - listRepositories: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - repositories: [ - createMockRepository({ name: 'test-repo-1', owner: 'owner-1' }), - createMockGiteaRepository({ name: 'gitea-repo', owner: 'owner-2' }) - ], - loaded: { repositories: true }, - loading: { repositories: false }, - errorMessages: { repositories: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getRepositories: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((endpointType: string) => { - if (endpointType === 'github') { - return '
<svg class="github-icon">GitHub Icon</svg>
                '; - } else if (endpointType === 'gitea') { - return 'Gitea Icon'; - } - return 'Unknown Icon'; - }), - changePerPage: vi.fn((newPerPage: number) => ({ - newPerPage, - newCurrentPage: 1 - })), - getEntityStatusBadge: vi.fn((entity: any) => ({ - text: entity?.pool_manager_status?.running ? 'Running' : 'Stopped', - variant: entity?.pool_manager_status?.running ? 'success' : 'error' - })), - filterRepositories: vi.fn((repositories: any[], searchTerm: string) => { - if (!searchTerm) return repositories; - return repositories.filter((repo: any) => - repo.name.toLowerCase().includes(searchTerm.toLowerCase()) || - repo.owner.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }), - paginateItems: vi.fn((items: any[], currentPage: number, perPage: number) => { - const start = (currentPage - 1) * perPage; - return items.slice(start, start + perPage); - }) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error: any) => { - return error?.message || 'An error occurred'; - }) -})); - -// Import the actual repositories page component after mocks -import RepositoriesPage from './+page.svelte'; - -describe('Repositories Page Rendering Tests', () => { - let eagerCacheManager: any; - - beforeEach(async () => { - vi.clearAllMocks(); - - // Setup default mock implementations - const cache = await import('$lib/stores/eager-cache.js'); - eagerCacheManager = cache.eagerCacheManager; - - eagerCacheManager.getRepositories.mockResolvedValue([]); - eagerCacheManager.retryResource.mockResolvedValue({}); - }); - - it('should render the repositories page component using testing library', () => { - // Test that render() doesn't throw errors and returns valid container - const result = render(RepositoriesPage); - - expect(result).toBeDefined(); - expect(result.container).toBeDefined(); - expect(result.component).toBeDefined(); - }); - - it('should render the page structure correctly', () => { - const { container } = render(RepositoriesPage); - - // Test that the main page structure is rendered - const spaceYDiv = container.querySelector('.space-y-6'); - expect(spaceYDiv).toBeTruthy(); - expect(spaceYDiv).toBeInTheDocument(); - }); - - it('should have correct page title in document head', () => { - render(RepositoriesPage); - - // Test that the document title is set correctly - expect(document.title).toBe('Repositories - GARM'); - }); - - it('should render without throwing errors', () => { - // Test that rendering doesn't throw any errors - expect(() => render(RepositoriesPage)).not.toThrow(); - }); - - it('should have proper component structure in DOM', () => { - const { container } = render(RepositoriesPage); - - // Test that the component creates actual DOM elements - expect(container.innerHTML).toContain('space-y-6'); - expect(container.firstChild).toBeTruthy(); - }); - - it('should successfully mount and render component in DOM', () => { - // Test that the component can be successfully mounted and rendered - const { container } = render(RepositoriesPage); - - // Verify the component is actually in the DOM - expect(container).toBeInTheDocument(); - expect(container.children.length).toBeGreaterThan(0); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(RepositoriesPage); - - // Test that unmounting doesn't throw errors - expect(() => unmount()).not.toThrow(); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/repositories/page.test.ts b/webapp/src/routes/repositories/page.test.ts deleted file mode 100644 index 
e7b10107..00000000 --- a/webapp/src/routes/repositories/page.test.ts +++ /dev/null @@ -1,478 +0,0 @@ -import { describe, it, expect, beforeEach, vi } from 'vitest'; -import { createMockRepository, createMockGiteaRepository } from '../../test/factories.js'; -import { setupMocks, mockGarmApi, mockEagerCacheManager, mockToastStore } from '../../test/mocks.js'; - -// Mock all the dependencies first -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - createRepository: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - installRepoWebhook: vi.fn(), - listRepositories: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback) => { - callback({ - repositories: [], - loaded: { repositories: false }, - loading: { repositories: false }, - errorMessages: { repositories: '' } - }); - return () => {}; - }) - }, - eagerCacheManager: { - getRepositories: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() - } -})); - -vi.mock('$lib/utils/common.js', () => ({ - getForgeIcon: vi.fn((endpointType: string) => { - if (endpointType === 'github') { - return '
<svg class="github-icon">GitHub Icon</svg>
                '; - } else if (endpointType === 'gitea') { - return 'Gitea Icon'; - } - return 'Unknown Icon'; - }), - changePerPage: vi.fn((newPerPage: number) => ({ - newPerPage, - newCurrentPage: 1 - })), - getEntityStatusBadge: vi.fn((entity: any) => ({ - text: entity?.pool_manager_status?.running ? 'Running' : 'Stopped', - variant: entity?.pool_manager_status?.running ? 'success' : 'error' - })), - filterRepositories: vi.fn((repositories: any[], searchTerm: string) => { - if (!searchTerm) return repositories; - return repositories.filter((repo: any) => - repo.name.toLowerCase().includes(searchTerm.toLowerCase()) || - repo.owner.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }), - paginateItems: vi.fn((items: any[], currentPage: number, perPage: number) => { - const start = (currentPage - 1) * perPage; - return items.slice(start, start + perPage); - }) -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((error: any) => { - return error?.message || 'An error occurred'; - }) -})); - -// Import the actual repositories page component after mocks -import RepositoriesPage from './+page.svelte'; - -describe('Repositories Page Unit Tests', () => { - let garmApi: any; - let eagerCacheManager: any; - let toastStore: any; - let commonUtils: any; - - beforeEach(async () => { - vi.clearAllMocks(); - - // Get the mocked modules - const apiClient = await import('$lib/api/client.js'); - const cache = await import('$lib/stores/eager-cache.js'); - const toast = await import('$lib/stores/toast.js'); - const utils = await import('$lib/utils/common.js'); - - garmApi = apiClient.garmApi; - eagerCacheManager = cache.eagerCacheManager; - toastStore = toast.toastStore; - commonUtils = utils; - - // Setup default mock implementations - eagerCacheManager.getRepositories.mockResolvedValue([]); - eagerCacheManager.retryResource.mockResolvedValue({}); - garmApi.createRepository.mockResolvedValue({ id: 'new-repo', name: 'new-repo', owner: 'test-owner' }); - garmApi.updateRepository.mockResolvedValue({}); - garmApi.deleteRepository.mockResolvedValue({}); - garmApi.installRepoWebhook.mockResolvedValue({}); - }); - - describe('Component Structure', () => { - it('should export the repositories page component as a function', () => { - // Test that the component imports and exports correctly - expect(RepositoriesPage).toBeDefined(); - expect(typeof RepositoriesPage).toBe('function'); - }); - - it('should have the expected Svelte 5 component structure', () => { - // Svelte 5 components are functions that can be called - expect(RepositoriesPage).toBeInstanceOf(Function); - - // Test the component function exists and is callable - expect(() => RepositoriesPage).not.toThrow(); - }); - - it('should import all required dependencies', () => { - // This test validates that the component can import all its dependencies - // without throwing any module resolution errors - expect(RepositoriesPage).toBeTruthy(); - }); - }); - - describe('Component Integration', () => { - it('should import the repositories page component successfully', () => { - // Test that the component imports without errors - expect(RepositoriesPage).toBeDefined(); - expect(typeof RepositoriesPage).toBe('function'); - }); - - it('should call eagerCacheManager.getRepositories on component initialization', async () => { - // This tests that the actual onMount logic in the component would call getRepositories - eagerCacheManager.getRepositories.mockResolvedValue([]); - - // Simulate the onMount behavior directly - await 
eagerCacheManager.getRepositories(); - - expect(eagerCacheManager.getRepositories).toHaveBeenCalled(); - }); - - it('should validate repository data structure with actual types', () => { - const mockRepo = createMockRepository(); - - // Test that our mock data matches the actual Repository type structure - expect(mockRepo).toHaveProperty('id'); - expect(mockRepo).toHaveProperty('name'); - expect(mockRepo).toHaveProperty('owner'); - expect(mockRepo).toHaveProperty('endpoint'); - expect(mockRepo).toHaveProperty('credentials_name'); - expect(mockRepo.endpoint).toHaveProperty('endpoint_type'); - }); - - it('should handle GitHub repository data correctly', () => { - const githubRepo = createMockRepository({ - endpoint: { - name: 'github.com', - endpoint_type: 'github', - description: 'GitHub endpoint', - api_base_url: 'https://api.github.com', - base_url: 'https://github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: undefined, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - } - }); - - // Test that forge icon utility would be called correctly for GitHub - const icon = commonUtils.getForgeIcon(githubRepo.endpoint?.endpoint_type || 'unknown'); - expect(icon).toContain('github-icon'); - expect(commonUtils.getForgeIcon).toHaveBeenCalledWith('github'); - }); - - it('should handle Gitea repository data correctly', () => { - const giteaRepo = createMockGiteaRepository(); - - // Test that forge icon utility would be called correctly for Gitea - const icon = commonUtils.getForgeIcon(giteaRepo.endpoint?.endpoint_type || 'unknown'); - expect(icon).toContain('gitea-icon'); - expect(commonUtils.getForgeIcon).toHaveBeenCalledWith('gitea'); - }); - }); - - describe('Page Utility Functions', () => { - it('should generate correct forge icon for GitHub', () => { - const icon = commonUtils.getForgeIcon('github'); - expect(icon).toContain('github-icon'); - expect(icon).toContain('GitHub Icon'); - }); - - it('should generate correct forge icon for Gitea', () => { - const icon = commonUtils.getForgeIcon('gitea'); - expect(icon).toContain('gitea-icon'); - expect(icon).toContain('Gitea Icon'); - }); - - it('should generate fallback icon for unknown endpoint type', () => { - const icon = commonUtils.getForgeIcon('unknown'); - expect(icon).toContain('unknown-icon'); - expect(icon).toContain('Unknown Icon'); - }); - - it('should filter repositories by name', () => { - const repositories = [ - createMockRepository({ name: 'frontend-app', owner: 'company' }), - createMockRepository({ name: 'backend-api', owner: 'company' }), - createMockRepository({ name: 'mobile-app', owner: 'team' }) - ]; - - const filtered = commonUtils.filterRepositories(repositories, 'frontend'); - expect(filtered).toHaveLength(1); - expect(filtered[0].name).toBe('frontend-app'); - }); - - it('should filter repositories by owner', () => { - const repositories = [ - createMockRepository({ name: 'app1', owner: 'team-alpha' }), - createMockRepository({ name: 'app2', owner: 'team-beta' }), - createMockRepository({ name: 'app3', owner: 'team-alpha' }) - ]; - - const filtered = commonUtils.filterRepositories(repositories, 'alpha'); - expect(filtered).toHaveLength(2); - expect(filtered.every((repo: any) => repo.owner === 'team-alpha')).toBe(true); - }); - - it('should return all repositories when search term is empty', () => { - const repositories = [ - createMockRepository({ name: 'app1' }), - createMockRepository({ name: 'app2' }) - ]; - - const filtered = commonUtils.filterRepositories(repositories, 
''); - expect(filtered).toHaveLength(2); - expect(filtered).toEqual(repositories); - }); - - it('should paginate items correctly', () => { - const items = Array.from({ length: 10 }, (_, i) => ({ id: i, name: `item-${i}` })); - - const page1 = commonUtils.paginateItems(items, 1, 5); - expect(page1).toHaveLength(5); - expect(page1[0].id).toBe(0); - expect(page1[4].id).toBe(4); - - const page2 = commonUtils.paginateItems(items, 2, 5); - expect(page2).toHaveLength(5); - expect(page2[0].id).toBe(5); - expect(page2[4].id).toBe(9); - }); - - it('should handle per page changes correctly', () => { - const result = commonUtils.changePerPage(50); - expect(result.newPerPage).toBe(50); - expect(result.newCurrentPage).toBe(1); - }); - - it('should generate correct status badge for running repository', () => { - const repository = createMockRepository({ - pool_manager_status: { running: true, failure_reason: undefined } - }); - - const badge = commonUtils.getEntityStatusBadge(repository); - expect(badge.text).toBe('Running'); - expect(badge.variant).toBe('success'); - }); - - it('should generate correct status badge for stopped repository', () => { - const repository = createMockRepository({ - pool_manager_status: { running: false, failure_reason: 'Manual stop' as any } - }); - - const badge = commonUtils.getEntityStatusBadge(repository); - expect(badge.text).toBe('Stopped'); - expect(badge.variant).toBe('error'); - }); - }); - - describe('Repository Data Operations', () => { - it('should call eagerCacheManager.getRepositories', async () => { - eagerCacheManager.getRepositories.mockResolvedValue([]); - - // Simulate the onMount behavior - await eagerCacheManager.getRepositories(); - - expect(eagerCacheManager.getRepositories).toHaveBeenCalled(); - }); - - it('should handle repository creation', async () => { - const newRepo = { id: 'new-repo', name: 'new-repo', owner: 'test-owner' }; - garmApi.createRepository.mockResolvedValue(newRepo); - - const repoParams = { - name: 'new-repo', - owner: 'test-owner', - credentials_name: 'test-creds', - webhook_secret: 'secret' - }; - - const result = await garmApi.createRepository(repoParams); - - expect(garmApi.createRepository).toHaveBeenCalledWith(repoParams); - expect(result).toEqual(newRepo); - }); - - it('should handle repository update', async () => { - const updateParams = { webhook_secret: 'new-secret' }; - garmApi.updateRepository.mockResolvedValue({}); - - await garmApi.updateRepository('repo-123', updateParams); - - expect(garmApi.updateRepository).toHaveBeenCalledWith('repo-123', updateParams); - }); - - it('should handle repository deletion', async () => { - garmApi.deleteRepository.mockResolvedValue({}); - - await garmApi.deleteRepository('repo-123'); - - expect(garmApi.deleteRepository).toHaveBeenCalledWith('repo-123'); - }); - - it('should handle webhook installation', async () => { - garmApi.installRepoWebhook.mockResolvedValue({}); - - await garmApi.installRepoWebhook('repo-123'); - - expect(garmApi.installRepoWebhook).toHaveBeenCalledWith('repo-123'); - }); - }); - - describe('Repository Factory Functions', () => { - it('should create a mock GitHub repository with correct properties', () => { - const repo = createMockRepository(); - - expect(repo.id).toBe('repo-123'); - expect(repo.name).toBe('test-repo'); - expect(repo.owner).toBe('test-owner'); - expect(repo.endpoint?.endpoint_type).toBe('github'); - expect(repo.endpoint?.name).toBe('github.com'); - expect(repo.credentials_name).toBe('test-credentials'); - }); - - it('should create a mock Gitea 
repository with correct properties', () => { - const repo = createMockGiteaRepository(); - - expect(repo.endpoint?.endpoint_type).toBe('gitea'); - expect(repo.endpoint?.name).toBe('gitea.example.com'); - expect(repo.endpoint?.api_base_url).toBe('https://gitea.example.com/api/v1'); - }); - - it('should allow overriding repository properties', () => { - const repo = createMockRepository({ - name: 'custom-repo', - owner: 'custom-owner', - credentials_name: 'custom-creds' - }); - - expect(repo.name).toBe('custom-repo'); - expect(repo.owner).toBe('custom-owner'); - expect(repo.credentials_name).toBe('custom-creds'); - }); - }); - - describe('Error Handling', () => { - it('should handle API errors with extractAPIError', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - const error = new Error('API request failed'); - const extractedError = extractAPIError(error); - - expect(extractedError).toBe('API request failed'); - }); - - it('should handle unknown errors with fallback message', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - const extractedError = extractAPIError(null); - - expect(extractedError).toBe('An error occurred'); - }); - - it('should handle repository creation errors', async () => { - const errorMessage = 'Repository creation failed'; - garmApi.createRepository.mockRejectedValue(new Error(errorMessage)); - - try { - await garmApi.createRepository({ - name: 'failing-repo', - owner: 'test-owner', - credentials_name: 'test-creds' - }); - } catch (error: any) { - expect(error.message).toBe(errorMessage); - } - - expect(garmApi.createRepository).toHaveBeenCalled(); - }); - - it('should handle webhook installation errors', async () => { - const errorMessage = 'Webhook installation failed'; - garmApi.installRepoWebhook.mockRejectedValue(new Error(errorMessage)); - - try { - await garmApi.installRepoWebhook('repo-123'); - } catch (error: any) { - expect(error.message).toBe(errorMessage); - } - - expect(garmApi.installRepoWebhook).toHaveBeenCalled(); - }); - }); - - describe('Toast Notifications', () => { - it('should show success toast for repository creation', () => { - toastStore.success('Repository Created', 'Repository test-owner/test-repo has been created successfully.'); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Repository Created', - 'Repository test-owner/test-repo has been created successfully.' - ); - }); - - it('should show success toast for repository update', () => { - toastStore.success('Repository Updated', 'Repository test-owner/test-repo has been updated successfully.'); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Repository Updated', - 'Repository test-owner/test-repo has been updated successfully.' - ); - }); - - it('should show success toast for repository deletion', () => { - toastStore.success('Repository Deleted', 'Repository test-owner/test-repo has been deleted successfully.'); - - expect(toastStore.success).toHaveBeenCalledWith( - 'Repository Deleted', - 'Repository test-owner/test-repo has been deleted successfully.' 
- ); - }); - - it('should show error toast for failures', () => { - toastStore.error('Delete Failed', 'Failed to delete repository'); - - expect(toastStore.error).toHaveBeenCalledWith( - 'Delete Failed', - 'Failed to delete repository' - ); - }); - }); - - describe('Cache Management', () => { - it('should handle cache retry', async () => { - eagerCacheManager.retryResource.mockResolvedValue({}); - - await eagerCacheManager.retryResource('repositories'); - - expect(eagerCacheManager.retryResource).toHaveBeenCalledWith('repositories'); - }); - - it('should handle cache errors', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Test that the cache subscription works - expect(eagerCache.subscribe).toBeDefined(); - expect(typeof eagerCache.subscribe).toBe('function'); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/scalesets/+page.svelte b/webapp/src/routes/scalesets/+page.svelte deleted file mode 100644 index 8b524073..00000000 --- a/webapp/src/routes/scalesets/+page.svelte +++ /dev/null @@ -1,318 +0,0 @@ - - - - Scale Sets - GARM - - -
- [script block and list markup stripped in extraction; the modal blocks below are all that survive of this component]
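The `<script>` block of this component did not survive extraction. Below is a minimal sketch of the state and handlers it plausibly contained, inferred from the surviving modal wiring and from the scale set tests later in this diff; the store and handler names come from the surviving markup and mocks, while `garmApi.createScaleSet` and the exact toast copy are assumptions.

    <script lang="ts">
      // Sketch of the lost script block; see the note above for assumptions.
      import { onMount } from 'svelte';
      import { eagerCache, eagerCacheManager } from '$lib/stores/eager-cache.js';
      import { garmApi } from '$lib/api/client.js';
      import { toastStore } from '$lib/stores/toast.js';
      import { extractAPIError } from '$lib/utils/apiError';

      // Search and pagination state; the tests look for a search input and a
      // per-page select defaulting to 25.
      let searchTerm = '';
      let currentPage = 1;
      let perPage = 25;

      // Modal state driving the {#if} blocks below.
      let showCreateModal = false;
      let showUpdateModal = false;
      let showDeleteModal = false;
      let selectedScaleSet: any = null;

      // The tests assert a cache subscription plus a getScaleSets() call on mount.
      $: scaleSets = $eagerCache.scalesets;
      $: loading = $eagerCache.loading.scalesets;
      $: cacheError = $eagerCache.errorMessages.scalesets;

      onMount(() => {
        eagerCacheManager.getScaleSets();
      });

      async function handleCreateScaleSet(params: any) {
        try {
          // createScaleSet is assumed; the tests only mock update and delete.
          await (garmApi as any).createScaleSet(params);
          toastStore.success('Scale Set Created', 'The scale set has been created successfully.');
          showCreateModal = false;
        } catch (err) {
          toastStore.error('Create Failed', extractAPIError(err));
        }
      }

      async function handleUpdateScaleSet(params: any) {
        try {
          await garmApi.updateScaleSet(selectedScaleSet.id, params);
          toastStore.success('Scale Set Updated', 'The scale set has been updated successfully.');
          showUpdateModal = false;
          selectedScaleSet = null;
        } catch (err) {
          toastStore.error('Update Failed', extractAPIError(err));
        }
      }

      async function handleDeleteScaleSet() {
        try {
          await garmApi.deleteScaleSet(selectedScaleSet.id);
          toastStore.success('Scale Set Deleted', 'The scale set has been deleted successfully.');
          showDeleteModal = false;
          selectedScaleSet = null;
        } catch (err) {
          toastStore.error('Delete Failed', extractAPIError(err));
        }
      }
    </script>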
-{#if showCreateModal}
-  <CreateScaleSetModal
-    on:close={() => showCreateModal = false}
-    on:submit={(e) => handleCreateScaleSet(e.detail)}
-  />
-{/if}
-
-{#if showUpdateModal && selectedScaleSet}
-  <UpdateScaleSetModal
-    on:close={() => { showUpdateModal = false; selectedScaleSet = null; }}
-    on:submit={(e) => handleUpdateScaleSet(e.detail)}
-  />
-{/if}
-
-{#if showDeleteModal && selectedScaleSet}
-  <DeleteModal
-    on:close={() => { showDeleteModal = false; selectedScaleSet = null; }}
-    on:confirm={handleDeleteScaleSet}
-  />
-{/if}
\ No newline at end of file
diff --git a/webapp/src/routes/scalesets/[id]/+page.svelte b/webapp/src/routes/scalesets/[id]/+page.svelte
deleted file mode 100644
index 3946f47a..00000000
--- a/webapp/src/routes/scalesets/[id]/+page.svelte
+++ /dev/null
@@ -1,380 +0,0 @@
- [script block stripped in extraction]
-
-<svelte:head>
-  <title>{scaleSet ? `${scaleSet.name} - Scale Set Details` : 'Scale Set Details'} - GARM</title>
-</svelte:head>
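The detail page's script block was stripped as well. The template below references a few helpers; this is a sketch of plausible implementations, assuming nothing beyond what the diff shows: `getEntityName` mirrors the mock used by the scale set tests in this diff, and the other bodies are illustrative guesses.

    // Sketch of the helpers the surviving template references. getEntityName
    // mirrors the mock in the scale set tests; the other bodies are assumptions.
    function getEntityType(s: any): string {
      if (s.repo_name) return 'Repository';
      if (s.org_name) return 'Organization';
      if (s.enterprise_name) return 'Enterprise';
      return 'Unknown';
    }

    function getEntityName(s: any): string {
      return s.repo_name || s.org_name || s.enterprise_name || 'Unknown';
    }

    function formatDate(dateString: string): string {
      // Locale-aware rendering is assumed; any date formatting fits the template.
      return dateString ? new Date(dateString).toLocaleString() : '';
    }

    function formatExtraSpecs(specs: unknown): string {
      // Pretty-print the extra_specs JSON blob (assumed shape).
      try {
        return JSON.stringify(typeof specs === 'string' ? JSON.parse(specs) : specs, null, 2);
      } catch {
        return String(specs);
      }
    }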
-{#if loading}
-  Loading scale set...
-{:else if error}
-  {error}
-{:else if scaleSet}
-  [detail header component; tag lost in extraction] onEdit={() => showUpdateModal = true} onDelete={() => showDeleteModal = true} />
-
-  Basic Information
-    Scale Set ID: {scaleSet.id}
-    Name: {scaleSet.name}
-    Provider: {scaleSet.provider_name}
-    Image: {scaleSet.image}
-    Flavor: {scaleSet.flavor}
-    Status: {scaleSet.enabled ? 'Enabled' : 'Disabled'}
-    Entity: {getEntityType(scaleSet)} {getEntityName(scaleSet)}
-    Created At: {formatDate(scaleSet.created_at || '')}
-    Updated At: {formatDate(scaleSet.updated_at || '')}
-
-  Configuration
-    Max Runners: {scaleSet.max_runners}
-    Min Idle Runners: {scaleSet.min_idle_runners}
-    Bootstrap Timeout: {scaleSet.runner_bootstrap_timeout} minutes
-    Runner Prefix: {scaleSet.runner_prefix || 'garm'}
-    OS Type / Architecture: {scaleSet.os_type} / {scaleSet.os_arch}
-    {#if scaleSet['github-runner-group']}
-      GitHub Runner Group: {scaleSet['github-runner-group']}
-    {/if}
-
-  {#if scaleSet.extra_specs}
-    Extra Specifications
-    {formatExtraSpecs(scaleSet.extra_specs)}
-  {/if}
-
-  {#if scaleSet.instances}
-    [instances table; markup lost in extraction]
-  {/if}
-{/if}
-
                - - -{#if showUpdateModal && scaleSet} - showUpdateModal = false} - on:submit={(e) => handleUpdate(e.detail)} - /> -{/if} - -{#if showDeleteModal && scaleSet} - showDeleteModal = false} - on:confirm={handleDelete} - /> -{/if} - -{#if showDeleteInstanceModal && selectedInstance} - { showDeleteInstanceModal = false; selectedInstance = null; }} - on:confirm={handleDeleteInstance} - /> -{/if} \ No newline at end of file diff --git a/webapp/src/routes/scalesets/page.integration.test.ts b/webapp/src/routes/scalesets/page.integration.test.ts deleted file mode 100644 index c450380a..00000000 --- a/webapp/src/routes/scalesets/page.integration.test.ts +++ /dev/null @@ -1,863 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen, waitFor } from '@testing-library/svelte'; -import ScaleSetsPage from './+page.svelte'; -import { createMockScaleSet } from '../../test/factories.js'; - -// Helper function to create complete EagerCacheState objects -function createMockCacheState(overrides: any = {}) { - return { - pools: [], - repositories: [], - organizations: [], - enterprises: [], - scalesets: [], - credentials: [], - endpoints: [], - controllerInfo: null, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '' - }, - ...overrides - }; -} - -// Mock app stores and navigation -vi.mock('$app/stores', () => ({})); -vi.mock('$app/navigation', () => ({})); - -const mockScaleSet = createMockScaleSet({ - id: 123, - name: 'test-scaleset', - repo_name: 'test-repo', - provider_name: 'hetzner', - enabled: true, - image: 'ubuntu:22.04', - flavor: 'default', - max_runners: 10, - min_idle_runners: 1, - status_messages: [ - { - message: 'Scale set started successfully', - event_level: 'info', - created_at: '2024-01-01T10:00:00Z' - }, - { - message: 'Runner pool ready', - event_level: 'info', - created_at: '2024-01-01T11:00:00Z' - }, - { - message: 'Warning: High memory usage detected', - event_level: 'warning', - created_at: '2024-01-01T12:00:00Z' - } - ] -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreateScaleSetModal.svelte'); -vi.unmock('$lib/components/UpdateScaleSetModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -// Only mock the data layer - APIs and stores -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - updateScaleSet: vi.fn(), - deleteScaleSet: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - add: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback: any) => { - callback(createMockCacheState()); - return () => {}; - }) - }, - eagerCacheManager: { - getScaleSets: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - 
-vi.mock('$lib/utils/common.js', async () => { - const actual = await vi.importActual('$lib/utils/common.js') as any; - return { - ...(actual as any), - getEntityName: vi.fn((entity) => { - if (entity.repo_name) return entity.repo_name; - if (entity.org_name) return entity.org_name; - if (entity.enterprise_name) return entity.enterprise_name; - return 'Unknown'; - }), - filterEntities: vi.fn((entities, searchTerm, getNameFn) => { - if (!searchTerm) return entities; - return entities.filter((entity: any) => { - const name = getNameFn(entity); - return name.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -// Global setup for each test -let garmApi: any; -let eagerCache: any; -let eagerCacheManager: any; -let toastStore: any; - -describe('Comprehensive Integration Tests for Scale Sets Page', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up API mocks with default successful responses - const apiModule = await import('$lib/api/client.js'); - garmApi = apiModule.garmApi; - - const cacheModule = await import('$lib/stores/eager-cache.js'); - eagerCache = cacheModule.eagerCache; - eagerCacheManager = cacheModule.eagerCacheManager; - - const toastModule = await import('$lib/stores/toast.js'); - toastStore = toastModule.toastStore; - - (garmApi.updateScaleSet as any).mockResolvedValue({}); - (garmApi.deleteScaleSet as any).mockResolvedValue({}); - (eagerCacheManager.getScaleSets as any).mockResolvedValue([mockScaleSet]); - (eagerCacheManager.retryResource as any).mockResolvedValue({}); - }); - - describe('Component Rendering and Data Display', () => { - it('should render scale sets page with real components', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Wait for data to load - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should render the main page structure - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - expect(screen.getByText('Manage GitHub runner scale sets')).toBeInTheDocument(); - }); - - it('should display scale sets data correctly', async () => { - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: [mockScaleSet], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - // Wait for data loading to complete - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should display scale set data - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should render all major sections when data is loaded', async () => { - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: [mockScaleSet], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should render main sections - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - 
expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering Functionality', () => { - it('should filter scale sets by search term', async () => { - const mockScaleSets = [ - createMockScaleSet({ id: 1, name: 'test-scaleset-1', repo_name: 'repo-one' }), - createMockScaleSet({ id: 2, name: 'test-scaleset-2', repo_name: 'repo-two' }), - createMockScaleSet({ id: 3, name: 'prod-scaleset', repo_name: 'prod-repo' }) - ]; - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: mockScaleSets, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - // Search functionality should be integrated - const searchInput = screen.getByPlaceholderText(/Search by entity name/i); - expect(searchInput).toBeInTheDocument(); - }); - - it('should clear search when input is cleared', async () => { - const { getEntityName, filterEntities } = await import('$lib/utils/common.js'); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - // Filter function should be available for clearing - expect(filterEntities).toBeDefined(); - expect(getEntityName).toBeDefined(); - }); - - it('should show no results when search matches nothing', async () => { - // Set up eager cache manager to return empty array - (eagerCacheManager.getScaleSets as any).mockResolvedValue([]); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: [], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Wait for component to process the empty state - await waitFor(() => { - expect(screen.getByText(/No scale sets found/i)).toBeInTheDocument(); - }); - }); - }); - - describe('Pagination Controls', () => { - it('should handle pagination with multiple scale sets', async () => { - const manyScaleSets = Array.from({ length: 30 }, (_, i) => - createMockScaleSet({ - id: i + 100, // Use unique IDs starting from 100 - name: `scaleset-${i + 1}`, - repo_name: `repo-${i + 1}` - }) - ); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: manyScaleSets, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - // Should have pagination controls - 
expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should allow changing items per page', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - // Per page control should be available - const perPageSelect = screen.getByDisplayValue('25'); - expect(perPageSelect).toBeInTheDocument(); - }); - }); - - describe('CRUD Operations Integration', () => { - it('should handle create scale set workflow', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - // Create button should be available - const createButton = screen.getByText('Add Scale Set'); - expect(createButton).toBeInTheDocument(); - }); - - it('should handle update scale set workflow', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Wait for component to be ready - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - // Update API should be available for the workflow - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - it('should handle delete scale set workflow', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Wait for component to be ready - expect(garmApi.deleteScaleSet).toBeDefined(); - }); - - // Delete API should be available for the workflow - expect(garmApi.deleteScaleSet).toBeDefined(); - }); - - it('should show success messages for CRUD operations', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(toastStore.success).toBeDefined(); - }); - - // Toast notifications should be integrated - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Modal Integration', () => { - it('should integrate modal workflows with main page state', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - // Modal triggers should be integrated - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - it('should handle modal close and state cleanup', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - // Modal state management should be integrated - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('API Integration', () => { - it('should call eager cache manager when component mounts', async () => { - render(ScaleSetsPage); - - // Wait for API calls to complete and data to be displayed - await waitFor(() => { - // Verify the component actually called the cache manager to load data - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - }); - - it('should display loading state initially then show data', async () => { - // Mock loading state initially - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Component should render the loading state initially - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - - // Wait for eager cache manager call - await waitFor(() => { - 
expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - }); - - it('should handle API errors and display error state', async () => { - // Mock API to fail - const error = new Error('Failed to load scale sets'); - (eagerCacheManager.getScaleSets as any).mockRejectedValue(error); - - const { container } = render(ScaleSetsPage); - - // Wait for error to be handled - await waitFor(() => { - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should still render page structure even when data loading fails - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - - // Should display error state in component structure - expect(container).toBeInTheDocument(); - }); - - it('should handle not found state', async () => { - // Mock cache manager to return empty array - (eagerCacheManager.getScaleSets as any).mockResolvedValue([]); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: [], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Wait for component to process the empty state and stop loading - await waitFor(() => { - expect(screen.getByText(/No scale sets found/i)).toBeInTheDocument(); - }); - }); - }); - - describe('Eager Cache Integration', () => { - it('should subscribe to eager cache on mount', async () => { - render(ScaleSetsPage); - - // Wait for component mount - await waitFor(() => { - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - }); - - it('should handle cache data updates', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Cache subscription should be integrated for real-time updates - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle cache errors and display error state', async () => { - // Set up cache to fail - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: 'Failed to load scale sets from cache', - credentials: '', - endpoints: '', - controllerInfo: '' - }, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, // Mark as loaded so it's not loading anymore - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, // Not loading anymore, so error can be displayed - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for loading to complete first, then check for error - await waitFor( - () => { - expect(screen.queryByText(/Loading scale sets/i)).not.toBeInTheDocument(); - }, - { timeout: 3000 } - ); - - // Now check for the cache error - await waitFor(() => { - expect(screen.getByText(/Failed to load 
scale sets from cache/i)).toBeInTheDocument(); - }); - - // Should display cache error - expect(screen.getByText(/Failed to load scale sets from cache/i)).toBeInTheDocument(); - }); - - it('should integrate retry functionality', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - - // Retry function should be integrated for error recovery - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Error Handling Integration', () => { - it('should integrate comprehensive error handling', async () => { - // Set up various error scenarios - const error = new Error('Network error'); - (eagerCacheManager.getScaleSets as any).mockRejectedValue(error); - - render(ScaleSetsPage); - - await waitFor(() => { - // Should handle errors gracefully - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should maintain page structure during errors - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle API operation errors', async () => { - // Mock update to fail - const error = new Error('Update failed'); - (garmApi.updateScaleSet as any).mockRejectedValue(error); - - render(ScaleSetsPage); - - await waitFor(() => { - // Error handling should be integrated with API operations - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - // API error handling should be integrated - expect(garmApi.updateScaleSet).toBeDefined(); - }); - }); - - describe('Component Integration and State Management', () => { - it('should integrate all sections with proper data flow', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // All sections should integrate properly with the main page - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Data flow should be properly integrated through the cache system - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should maintain consistent state across components', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // State should be consistent across all child components - // Data should be integrated through the cache system - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // All sections should display consistent state - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle component lifecycle correctly', () => { - const { unmount } = render(ScaleSetsPage); - - // Should unmount without errors - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Real-time Updates Integration', () => { - it('should handle real-time scale set updates through cache', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Should handle real-time updates through eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Real-time update subscription should be integrated - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle real-time scale set creation', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Should handle real-time creation through cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Creation events should be handled through cache integration - expect(eagerCache.subscribe).toHaveBeenCalled(); - 
}); - - it('should handle real-time scale set deletion', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Should handle real-time deletion through cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - // Deletion events should be handled through cache integration - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - }); - - describe('Accessibility and Responsive Design', () => { - it('should have proper accessibility attributes', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Should have proper ARIA attributes and labels - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - // Should have accessible navigation elements - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should be responsive across different viewport sizes', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - // Should render properly across different viewport sizes - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - // Should have responsive layout classes - expect(document.querySelector('.space-y-6')).toBeInTheDocument(); - }); - - it('should handle screen reader compatibility', async () => { - // Ensure cache manager returns scale set data - (eagerCacheManager.getScaleSets as any).mockResolvedValue([mockScaleSet]); - - render(ScaleSetsPage); - - await waitFor(() => { - // Should be compatible with screen readers - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - // Wait for scale set data to load and display - await waitFor(() => { - expect(screen.getByText('Manage GitHub runner scale sets')).toBeInTheDocument(); - }); - }); - }); - - describe('User Interaction Flows', () => { - it('should handle complete create scale set flow', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - // Complete create flow should be integrated - const createButton = screen.getByText('Add Scale Set'); - expect(createButton).toBeInTheDocument(); - }); - - it('should handle complete update scale set flow', async () => { - vi.mocked(eagerCache.subscribe).mockImplementation((callback: (state: any) => void) => { - callback(createMockCacheState({ - scalesets: [mockScaleSet], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - await waitFor(() => { - // Update workflow should be integrated - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - // Update integration should be complete - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - it('should handle concurrent search and pagination changes', async () => { - render(ScaleSetsPage); - - await waitFor(() => { - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - // Search and pagination should work together - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/scalesets/page.render.test.ts b/webapp/src/routes/scalesets/page.render.test.ts deleted file mode 100644 index 0266a4d7..00000000 --- a/webapp/src/routes/scalesets/page.render.test.ts +++ /dev/null @@ -1,528 +0,0 @@ -import { 
describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import ScaleSetsPage from './+page.svelte'; -import { createMockScaleSet } from '../../test/factories.js'; - -// Helper function to create complete EagerCacheState objects -function createMockCacheState(overrides: any = {}) { - return { - pools: [], - repositories: [], - organizations: [], - enterprises: [], - scalesets: [], - credentials: [], - endpoints: [], - controllerInfo: null, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '' - }, - ...overrides - }; -} - -// Mock all external dependencies -vi.mock('$app/stores', () => ({})); -vi.mock('$app/navigation', () => ({})); - -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - updateScaleSet: vi.fn(), - deleteScaleSet: vi.fn() - } -})); - -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - add: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback: any) => { - callback(createMockCacheState()); - return () => {}; - }) - }, - eagerCacheManager: { - getScaleSets: vi.fn(), - retryResource: vi.fn() - } -})); - -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/common.js', async () => { - const actual = await vi.importActual('$lib/utils/common.js') as any; - return { - ...(actual as any), - getEntityName: vi.fn((entity) => { - if (entity.repo_name) return entity.repo_name; - if (entity.org_name) return entity.org_name; - if (entity.enterprise_name) return entity.enterprise_name; - return 'Unknown'; - }), - filterEntities: vi.fn((entities, searchTerm, getNameFn) => { - if (!searchTerm) return entities; - return entities.filter((entity: any) => { - const name = getNameFn(entity); - return name.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -const mockScaleSet = createMockScaleSet({ - id: 123, - name: 'test-scaleset', - repo_name: 'test-repo', - provider_name: 'hetzner', - enabled: true, - image: 'ubuntu:22.04', - flavor: 'default' -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreateScaleSetModal.svelte'); -vi.unmock('$lib/components/UpdateScaleSetModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -describe('Scale Sets Page - Render Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default API mocks - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getScaleSets as any).mockResolvedValue([mockScaleSet]); - }); - - describe('Basic Rendering', () => { - it('should render without crashing', () => { - const { container } = render(ScaleSetsPage); - expect(container).toBeInTheDocument(); - }); - - it('should have proper document structure', () => { - const { 
container } = render(ScaleSetsPage); - expect(container.querySelector('div')).toBeInTheDocument(); - }); - - it('should render page header', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have page header - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should render data table', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have data table - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const { component } = render(ScaleSetsPage); - expect(component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(ScaleSetsPage); - expect(() => unmount()).not.toThrow(); - }); - - it('should handle component updates', async () => { - const { component } = render(ScaleSetsPage); - - // Component should handle reactive updates - expect(component).toBeDefined(); - }); - - it('should load scale sets on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Wait for component mount and data loading - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should call eager cache manager to load scale sets - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - }); - - describe('DOM Structure', () => { - it('should create proper DOM hierarchy', async () => { - const { container } = render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have main container with proper spacing - const mainDiv = container.querySelector('div.space-y-6'); - expect(mainDiv).toBeInTheDocument(); - }); - - it('should render svelte:head for page title', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should set page title - expect(document.title).toBe('Scale Sets - GARM'); - }); - - it('should handle error display conditionally', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: 'Test error', - credentials: '', - endpoints: '', - controllerInfo: '' - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for error - await new Promise(resolve => setTimeout(resolve, 100)); - - // Error display should be conditional - expect(screen.getByText(/Test error/i)).toBeInTheDocument(); - }); - - it('should render loading state initially', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should show loading initially - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - }); - }); - - describe('Header 
Section Rendering', () => { - it('should render page header with correct title', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render page header - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - expect(screen.getByText('Manage GitHub runner scale sets')).toBeInTheDocument(); - }); - - it('should render create action button', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have create button - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - }); - - describe('Data Table Rendering', () => { - it('should render data table with scale sets', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - scalesets: [mockScaleSet], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should render data table - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should render search functionality', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have search input - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should render pagination controls', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have pagination controls - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should render empty state when no scale sets', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - scalesets: [], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should show empty state - expect(screen.getByText(/No scale sets found/i)).toBeInTheDocument(); - }); - }); - - describe('Modal Rendering', () => { - it('should conditionally render create modal', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Create modal should not be visible initially - expect(screen.queryByText(/Create Scale Set/i)).not.toBeInTheDocument(); - }); - - it('should conditionally render update modal', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Update modal should not be visible initially - expect(screen.queryByText(/Update Scale Set/i)).not.toBeInTheDocument(); - }); - - it('should conditionally render delete modal', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new 
Promise(resolve => setTimeout(resolve, 0)); - - // Delete modal should not be visible initially - expect(screen.queryByText(/Delete Scale Set/i)).not.toBeInTheDocument(); - }); - }); - - describe('Integration Elements', () => { - it('should integrate eager cache subscription', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Should subscribe to eager cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should integrate with eager cache manager', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should use cache manager for loading - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - - it('should integrate retry functionality', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Retry function should be available - expect(eagerCacheManager.retryResource).toBeDefined(); - }); - }); - - describe('Responsive Layout', () => { - it('should use responsive layout classes', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have responsive layout - const container = document.querySelector('.space-y-6'); - expect(container).toBeInTheDocument(); - }); - - it('should handle mobile-friendly layout', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should have mobile card configuration - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('Component Integration', () => { - it('should integrate all major components', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should integrate PageHeader and DataTable - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - it('should handle component communication', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Component should be ready for events - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('Error State Rendering', () => { - it('should render error states gracefully', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock to fail - (eagerCacheManager.getScaleSets as any).mockRejectedValue(new Error('Test error')); - - render(ScaleSetsPage); - - // Wait for error handling - await new Promise(resolve => setTimeout(resolve, 100)); - - // Should render without crashing despite error - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle cache errors in UI', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: 'Cache error occurred', - credentials: '', - endpoints: '', - controllerInfo: '' - } - })); - return () => {}; - }); - - 
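// Note (descriptive comment, not in the original file): mocking `subscribe` to invoke the
// callback once, synchronously, and return a no-op unsubscribe reproduces the minimal
// Svelte store contract the page consumes; no writable store or live updates are needed here.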
render(ScaleSetsPage); - - // Should display cache error - expect(screen.getByText(/Cache error occurred/i)).toBeInTheDocument(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/routes/scalesets/page.test.ts b/webapp/src/routes/scalesets/page.test.ts deleted file mode 100644 index 6b876f87..00000000 --- a/webapp/src/routes/scalesets/page.test.ts +++ /dev/null @@ -1,630 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { render, screen } from '@testing-library/svelte'; -import ScaleSetsPage from './+page.svelte'; -import { createMockScaleSet } from '../../test/factories.js'; - -// Helper function to create complete EagerCacheState objects -function createMockCacheState(overrides: any = {}) { - return { - pools: [], - repositories: [], - organizations: [], - enterprises: [], - scalesets: [], - credentials: [], - endpoints: [], - controllerInfo: null, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: false, - credentials: false, - endpoints: false, - controllerInfo: false - }, - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: '', - credentials: '', - endpoints: '', - controllerInfo: '' - }, - ...overrides - }; -} - -// Mock the page stores -vi.mock('$app/stores', () => ({})); - -// Mock navigation -vi.mock('$app/navigation', () => ({})); - -// Mock the API client -vi.mock('$lib/api/client.js', () => ({ - garmApi: { - updateScaleSet: vi.fn(), - deleteScaleSet: vi.fn() - } -})); - -// Mock stores -vi.mock('$lib/stores/toast.js', () => ({ - toastStore: { - success: vi.fn(), - add: vi.fn(), - error: vi.fn(), - info: vi.fn() - } -})); - -vi.mock('$lib/stores/eager-cache.js', () => ({ - eagerCache: { - subscribe: vi.fn((callback: any) => { - callback(createMockCacheState()); - return () => {}; - }) - }, - eagerCacheManager: { - getScaleSets: vi.fn(), - retryResource: vi.fn() - } -})); - -// Mock utilities -vi.mock('$lib/utils/apiError', () => ({ - extractAPIError: vi.fn((err) => err.message || 'Unknown error') -})); - -vi.mock('$lib/utils/common.js', async () => { - const actual = await vi.importActual('$lib/utils/common.js') as any; - return { - ...(actual as any), - getEntityName: vi.fn((entity) => { - if (entity.repo_name) return entity.repo_name; - if (entity.org_name) return entity.org_name; - if (entity.enterprise_name) return entity.enterprise_name; - return 'Unknown'; - }), - filterEntities: vi.fn((entities, searchTerm, getNameFn) => { - if (!searchTerm) return entities; - return entities.filter((entity: any) => { - const name = getNameFn(entity); - return name.toLowerCase().includes(searchTerm.toLowerCase()); - }); - }) - }; -}); - -// Reset any component mocks that might be set by setup.ts -vi.unmock('$lib/components/PageHeader.svelte'); -vi.unmock('$lib/components/DataTable.svelte'); -vi.unmock('$lib/components/CreateScaleSetModal.svelte'); -vi.unmock('$lib/components/UpdateScaleSetModal.svelte'); -vi.unmock('$lib/components/DeleteModal.svelte'); -vi.unmock('$lib/components/cells'); - -const mockScaleSet = createMockScaleSet({ - id: 123, - name: 'test-scaleset', - repo_name: 'test-repo', - provider_name: 'hetzner', - enabled: true, - image: 'ubuntu:22.04', - flavor: 'default', - max_runners: 10, - min_idle_runners: 1 -}); - 
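// The suites below repeatedly `await new Promise(resolve => setTimeout(resolve, 0))` to let
// onMount callbacks and pending promise chains settle before asserting. A named helper
// (a sketch, not part of the original suite) would express the same intent more clearly:
// const settle = (ms = 0) => new Promise<void>((resolve) => setTimeout(resolve, ms));
// e.g. `await settle();` after render(), or `await settle(100);` when waiting for a rejected
// load to surface in the UI. The tests below keep the inline form.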
-describe('Scale Sets Page - Unit Tests', () => { - beforeEach(async () => { - vi.clearAllMocks(); - - // Set up default eager cache manager mock - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - (eagerCacheManager.getScaleSets as any).mockResolvedValue([mockScaleSet]); - }); - - describe('Component Initialization', () => { - it('should render successfully', () => { - const { container } = render(ScaleSetsPage); - expect(container).toBeInTheDocument(); - }); - - it('should set page title', () => { - render(ScaleSetsPage); - expect(document.title).toBe('Scale Sets - GARM'); - }); - - it('should load scale sets on mount', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Wait for component mount - await new Promise(resolve => setTimeout(resolve, 0)); - - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - }); - - describe('Data Loading', () => { - it('should handle loading state', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should show loading indicator - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - }); - - it('should handle API error state', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock API to fail - const error = new Error('Failed to load scale sets'); - (eagerCacheManager.getScaleSets as any).mockRejectedValue(error); - - render(ScaleSetsPage); - - // Wait for the error to be handled - await new Promise(resolve => setTimeout(resolve, 100)); - - // Should handle error gracefully - expect(eagerCacheManager.getScaleSets).toHaveBeenCalled(); - }); - }); - - describe('Scale Sets Display', () => { - it('should display scale sets in data table', async () => { - const mockScaleSets = [mockScaleSet]; - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with scale sets data - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - scalesets: mockScaleSets, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for data to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Should display scale sets table - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle empty scale sets list', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with empty data - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - scalesets: [], - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Wait for data to load - await new Promise(resolve => setTimeout(resolve, 0)); - - 
// Should show empty state - expect(screen.getByText(/No scale sets found/i)).toBeInTheDocument(); - }); - }); - - describe('Eager Cache Integration', () => { - it('should subscribe to eager cache', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - render(ScaleSetsPage); - - // Should subscribe to cache - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle cache data updates', async () => { - const mockScaleSets = [mockScaleSet]; - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with scale sets data - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - scalesets: mockScaleSets, - loaded: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Component should handle cache updates - expect(eagerCache.subscribe).toHaveBeenCalled(); - }); - - it('should handle cache error states', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock cache with error - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: 'Failed to load scale sets', - credentials: '', - endpoints: '', - controllerInfo: '' - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should handle cache errors - expect(screen.getByText(/Failed to load scale sets/i)).toBeInTheDocument(); - }); - - it('should display cache error messages', async () => { - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock loading error state - vi.mocked(eagerCache.subscribe).mockImplementation((callback) => { - callback(createMockCacheState({ - errorMessages: { - repositories: '', - organizations: '', - enterprises: '', - pools: '', - scalesets: 'Failed to load scale sets from cache', - credentials: '', - endpoints: '', - controllerInfo: '' - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should display error - expect(screen.getByText(/Failed to load scale sets from cache/i)).toBeInTheDocument(); - }); - }); - - describe('Search and Filtering', () => { - it('should handle search functionality', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Search functionality should be available - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle pagination calculations', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should show loading state - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - - // Pagination controls should be available - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should filter scale sets by search term', async () => { - 
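// Filtering is delegated to the `filterEntities` mock defined above, which matches on the
// lower-cased entity name against the lower-cased search term; this test only verifies
// that the search input is wired up.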
render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Search input should be available for search events - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - }); - - describe('Event Handling', () => { - it('should handle table search events', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should show loading state - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - - // Search input should be available for search events - expect(screen.getByPlaceholderText(/Search by entity name/i)).toBeInTheDocument(); - }); - - it('should handle table pagination events', async () => { - // Mock eager cache with loading state - const { eagerCache } = await import('$lib/stores/eager-cache.js'); - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Should show loading state - expect(screen.getByText(/Loading scale sets/i)).toBeInTheDocument(); - - // Pagination controls should be integrated - expect(screen.getByDisplayValue('25')).toBeInTheDocument(); - }); - - it('should handle edit events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(ScaleSetsPage); - - // Component should handle edit events from DataTable - expect(garmApi.updateScaleSet).toBeDefined(); - - // Edit infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle delete events', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(ScaleSetsPage); - - // Component should handle delete events from DataTable - expect(garmApi.deleteScaleSet).toBeDefined(); - - // Delete infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle retry events', async () => { - const { eagerCacheManager, eagerCache } = await import('$lib/stores/eager-cache.js'); - - // Mock eager cache with loading state - vi.mocked(eagerCache.subscribe).mockImplementation((callback: any) => { - callback(createMockCacheState({ - loading: { - repositories: false, - organizations: false, - enterprises: false, - pools: false, - scalesets: true, - credentials: false, - endpoints: false, - controllerInfo: false - } - })); - return () => {}; - }); - - render(ScaleSetsPage); - - // Component should handle retry events from DataTable - expect(eagerCacheManager.retryResource).toBeDefined(); - - // Retry infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('Modal Management', () => { - it('should handle create modal state', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => 
setTimeout(resolve, 0)); - - // Create button should be available - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - it('should handle update modal state', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Modal infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - - it('should handle delete modal state', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Modal infrastructure should be ready - expect(screen.getByRole('heading', { name: 'Scale Sets' })).toBeInTheDocument(); - }); - }); - - describe('CRUD Operations', () => { - it('should handle create scale set', async () => { - render(ScaleSetsPage); - - // Wait for component to load - await new Promise(resolve => setTimeout(resolve, 0)); - - // Create functionality should be available - expect(screen.getByText('Add Scale Set')).toBeInTheDocument(); - }); - - it('should handle update scale set', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(ScaleSetsPage); - - // Update functionality should be available - expect(garmApi.updateScaleSet).toBeDefined(); - }); - - it('should handle delete scale set', async () => { - const { garmApi } = await import('$lib/api/client.js'); - - render(ScaleSetsPage); - - // Delete functionality should be available - expect(garmApi.deleteScaleSet).toBeDefined(); - }); - }); - - describe('Toast Integration', () => { - it('should show success messages for CRUD operations', async () => { - const { toastStore } = await import('$lib/stores/toast.js'); - - render(ScaleSetsPage); - - // Toast store should be available for success messages - expect(toastStore.success).toBeDefined(); - expect(toastStore.error).toBeDefined(); - }); - }); - - describe('Component Lifecycle', () => { - it('should mount successfully', () => { - const component = render(ScaleSetsPage); - expect(component.component).toBeDefined(); - }); - - it('should unmount without errors', () => { - const { unmount } = render(ScaleSetsPage); - expect(() => unmount()).not.toThrow(); - }); - }); - - describe('Error Handling', () => { - it('should handle mount errors gracefully', async () => { - const { eagerCacheManager } = await import('$lib/stores/eager-cache.js'); - - // Mock mount to fail - const error = new Error('Mount failed'); - (eagerCacheManager.getScaleSets as any).mockRejectedValue(error); - - expect(() => render(ScaleSetsPage)).not.toThrow(); - }); - - it('should handle API errors during operations', async () => { - const { extractAPIError } = await import('$lib/utils/apiError'); - - render(ScaleSetsPage); - - // Error handling should be available - expect(extractAPIError).toBeDefined(); - }); - }); -}); \ No newline at end of file diff --git a/webapp/src/test/factories.ts b/webapp/src/test/factories.ts deleted file mode 100644 index db340b71..00000000 --- a/webapp/src/test/factories.ts +++ /dev/null @@ -1,261 +0,0 @@ -import type { Repository, Organization, Enterprise, Instance, Pool, ScaleSet, ForgeCredentials, EndpointType, ForgeEndpoint } from '$lib/api/generated/api.js'; - -export function createMockRepository(overrides: Partial<Repository> = {}): Repository { - return { - id: 'repo-123', - name: 'test-repo', - owner: 'test-owner', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - credentials_name: 'test-credentials', - 
credentials_id: 1, - credentials: createMockCredentials(), - endpoint: { - name: 'github.com', - endpoint_type: 'github' as EndpointType, - description: 'GitHub endpoint', - api_base_url: 'https://api.github.com', - base_url: 'https://github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - pool_manager_status: { - running: true, - failure_reason: null - }, - ...overrides - }; -} - -export function createMockCredentials(overrides: Partial<ForgeCredentials> = {}): ForgeCredentials { - return { - id: Math.floor(Math.random() * 10000), - name: 'test-credentials', - description: 'Test credentials', - endpoint_name: 'github.com', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - ...overrides - }; -} - -export function createMockGiteaRepository(overrides: Partial<Repository> = {}): Repository { - return createMockRepository({ - endpoint: { - name: 'gitea.example.com', - endpoint_type: 'gitea' as EndpointType, - description: 'Gitea endpoint', - api_base_url: 'https://gitea.example.com/api/v1', - base_url: 'https://gitea.example.com', - upload_base_url: null, - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - ...overrides - }); -} - -export function createMockOrganization(overrides: Partial<Organization> = {}): Organization { - return { - id: 'org-123', - name: 'test-org', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - credentials_name: 'test-credentials', - credentials_id: 1, - credentials: createMockCredentials(), - endpoint: { - name: 'github.com', - endpoint_type: 'github' as EndpointType, - description: 'GitHub endpoint', - api_base_url: 'https://api.github.com', - base_url: 'https://github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - pool_manager_status: { - running: true, - failure_reason: null - }, - ...overrides - }; -} - -export function createMockGiteaOrganization(overrides: Partial<Organization> = {}): Organization { - return createMockOrganization({ - endpoint: { - name: 'gitea.example.com', - endpoint_type: 'gitea' as EndpointType, - description: 'Gitea endpoint', - api_base_url: 'https://gitea.example.com/api/v1', - base_url: 'https://gitea.example.com', - upload_base_url: null, - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - ...overrides - }); -} - -export function createMockEnterprise(overrides: Partial<Enterprise> = {}): Enterprise { - return { - id: 'ent-123', - name: 'test-enterprise', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - credentials_name: 'test-credentials', - credentials_id: 1, - credentials: createMockCredentials(), - endpoint: { - name: 'github.com', - endpoint_type: 'github' as EndpointType, - description: 'GitHub endpoint', - api_base_url: 'https://api.github.com', - base_url: 'https://github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - pool_manager_status: { - running: true, - failure_reason: null - }, - ...overrides - }; -} - -export function createMockPool(overrides: Partial<Pool> = {}): Pool { - return { - id: 'pool-123', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - enabled: true, - image: 'ubuntu:22.04', - flavor: 'default', - max_runners: 10, - min_idle_runners: 
1, - os_arch: 'amd64', - os_type: 'linux', - priority: 100, - provider_name: 'test-provider', - runner_bootstrap_timeout: 20, - runner_prefix: 'garm', - tags: ['ubuntu', 'test'], - repo_id: 'repo-123', - ...overrides - }; -} - -export function createMockInstance(overrides: Partial<Instance> = {}): Instance { - return { - id: 'inst-123', - name: 'test-instance', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - agent_id: 12345, - pool_id: 'pool-123', - provider_id: 'prov-123', - os_type: 'linux', - os_name: 'ubuntu', - os_arch: 'amd64', - status: 'running', - runner_status: 'idle', - addresses: [ - { address: '192.168.1.100', type: 'private' } - ], - ...overrides - }; -} - -export function createMockForgeEndpoint(overrides: Partial<ForgeEndpoint> = {}): ForgeEndpoint { - return { - name: 'github.com', - description: 'GitHub.com endpoint', - endpoint_type: 'github', - base_url: 'https://github.com', - api_base_url: 'https://api.github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - ...overrides - }; -} - -export function createMockGiteaEndpoint(overrides: Partial<ForgeEndpoint> = {}): ForgeEndpoint { - return createMockForgeEndpoint({ - name: 'gitea.example.com', - description: 'Gitea endpoint', - endpoint_type: 'gitea', - base_url: 'https://gitea.example.com', - api_base_url: 'https://gitea.example.com/api/v1', - upload_base_url: null, - ...overrides - }); -} - -export function createMockGithubCredentials(overrides: Partial<ForgeCredentials> = {}): ForgeCredentials { - return createMockCredentials({ - forge_type: 'github', - 'auth-type': 'pat', - endpoint: createMockForgeEndpoint(), - ...overrides - }); -} - -export function createMockGiteaCredentials(overrides: Partial<ForgeCredentials> = {}): ForgeCredentials { - return createMockCredentials({ - forge_type: 'gitea', - 'auth-type': 'pat', - endpoint: createMockGiteaEndpoint(), - ...overrides - }); -} - -export function createMockScaleSet(overrides: Partial<ScaleSet> = {}): ScaleSet { - return { - id: 123, - name: 'test-scaleset', - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z', - enabled: true, - image: 'ubuntu:22.04', - flavor: 'default', - max_runners: 10, - min_idle_runners: 1, - os_arch: 'amd64', - os_type: 'linux', - provider_name: 'test-provider', - runner_bootstrap_timeout: 20, - runner_prefix: 'garm', - repo_id: 'repo-123', - repo_name: 'test-repo', - scale_set_id: 8, - state: 'active', - desired_runner_count: 5, - disable_update: false, - 'github-runner-group': 'default', - extra_specs: {}, - endpoint: { - name: 'github.com', - endpoint_type: 'github' as EndpointType, - description: 'GitHub endpoint', - api_base_url: 'https://api.github.com', - base_url: 'https://github.com', - upload_base_url: 'https://uploads.github.com', - ca_cert_bundle: null, - created_at: '2024-01-01T00:00:00Z', - updated_at: '2024-01-01T00:00:00Z' - }, - instances: [], - status_messages: [], - ...overrides - }; -} \ No newline at end of file diff --git a/webapp/src/test/mocks.ts b/webapp/src/test/mocks.ts deleted file mode 100644 index 893251f1..00000000 --- a/webapp/src/test/mocks.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { vi } from 'vitest'; -import type { Repository, CreateRepoParams, UpdateEntityParams } from '$lib/api/generated/api.js'; - -// Mock the API client -export const mockGarmApi = { - createRepository: vi.fn(), - updateRepository: vi.fn(), - deleteRepository: vi.fn(), - installRepoWebhook: vi.fn(), - listRepositories: vi.fn() -}; - -// Mock the eager cache -export 
const mockEagerCache = { - repositories: [] as any[], - loaded: { - repositories: false - }, - loading: { - repositories: false - }, - errorMessages: { - repositories: '' - } -}; - -export const mockEagerCacheManager = { - getRepositories: vi.fn(), - retryResource: vi.fn() -}; - -// Mock the toast store -export const mockToastStore = { - success: vi.fn(), - error: vi.fn(), - info: vi.fn(), - warning: vi.fn() -}; - -// Setup common mocks -export function setupMocks() { - vi.clearAllMocks(); - - // Reset mock implementations - mockGarmApi.createRepository.mockResolvedValue({ id: 'new-repo', name: 'new-repo', owner: 'test-owner' }); - mockGarmApi.updateRepository.mockResolvedValue({}); - mockGarmApi.deleteRepository.mockResolvedValue({}); - mockGarmApi.installRepoWebhook.mockResolvedValue({}); - mockEagerCacheManager.getRepositories.mockResolvedValue([]); - mockEagerCacheManager.retryResource.mockResolvedValue({}); -} \ No newline at end of file diff --git a/webapp/src/test/setup.ts b/webapp/src/test/setup.ts deleted file mode 100644 index f8d9e53e..00000000 --- a/webapp/src/test/setup.ts +++ /dev/null @@ -1,191 +0,0 @@ -import '@testing-library/jest-dom'; - -// Mock SvelteKit runtime modules -import { vi } from 'vitest'; - -// Mock SvelteKit stores -vi.mock('$app/stores', () => ({ - page: { - subscribe: vi.fn(() => () => {}) - } -})); - -// Mock SvelteKit paths -vi.mock('$app/paths', () => ({ - resolve: vi.fn((path: string) => path) -})); - -// Mock SvelteKit environment - Set browser to true for client-side rendering -vi.mock('$app/environment', () => ({ - browser: true, - dev: true, - building: false, - version: 'test' -})); - -// Simple component mocks that render as basic divs -vi.mock('$lib/components/CreateRepositoryModal.svelte', () => ({ - default: function MockCreateRepositoryModal(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'create-repository-modal'); - div.textContent = 'Create Repository Modal'; - target.appendChild(div); - } - return { - $destroy: vi.fn(), - $set: vi.fn(), - $on: vi.fn() - }; - } -})); - -vi.mock('$lib/components/UpdateEntityModal.svelte', () => ({ - default: function MockUpdateEntityModal(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'update-entity-modal'); - div.textContent = 'Update Entity Modal'; - target.appendChild(div); - } - return { - $destroy: vi.fn(), - $set: vi.fn(), - $on: vi.fn() - }; - } -})); - -vi.mock('$lib/components/DeleteModal.svelte', () => ({ - default: function MockDeleteModal(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'delete-modal'); - div.textContent = 'Delete Modal'; - target.appendChild(div); - } - return { - $destroy: vi.fn(), - $set: vi.fn(), - $on: vi.fn() - }; - } -})); - -vi.mock('$lib/components/PageHeader.svelte', () => ({ - default: function MockPageHeader(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - // Extract title from props or use generic title - const props = options.props || {}; - const title = props.title || 'Runner Instances'; - const showAction = props.showAction !== false; - const actionText = props.actionText || 'Add'; - - let html = `

                <h1>${title}</h1>
                `; - if (showAction) { - html += `<button type="button">${actionText}</button>`; - } - div.innerHTML = html; - target.appendChild(div); - } - return { - $destroy: vi.fn(), - $set: vi.fn(), - $on: vi.fn() - }; - } -})); - -vi.mock('$lib/components/DataTable.svelte', () => ({ - default: function MockDataTable(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'data-table'); - - // Extract search placeholder from props - const props = options.props || {}; - const searchPlaceholder = props.searchPlaceholder || 'Search...'; - - div.innerHTML = ` 
                <div>DataTable Component</div>
                <input type="text" placeholder="${searchPlaceholder}" />
                <select><option value="25" selected>25</option></select>
                - - `; - target.appendChild(div); - } - return { - $destroy: vi.fn(), - $set: vi.fn(), - $on: vi.fn() - }; - } -})); - -// Mock cell components -vi.mock('$lib/components/cells', () => ({ - EntityCell: function MockEntityCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'entity-cell'); - div.textContent = 'Entity Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - }, - EndpointCell: function MockEndpointCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'endpoint-cell'); - div.textContent = 'Endpoint Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - }, - StatusCell: function MockStatusCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'status-cell'); - div.textContent = 'Status Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - }, - ActionsCell: function MockActionsCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'actions-cell'); - div.textContent = 'Actions Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - }, - GenericCell: function MockGenericCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'generic-cell'); - div.textContent = 'Generic Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - }, - InstancePoolCell: function MockInstancePoolCell(options: any) { - const target = options.target; - if (target) { - const div = document.createElement('div'); - div.setAttribute('data-testid', 'instance-pool-cell'); - div.textContent = 'Instance Pool Cell'; - target.appendChild(div); - } - return { $destroy: vi.fn(), $set: vi.fn(), $on: vi.fn() }; - } -})); \ No newline at end of file diff --git a/webapp/static/assets/garm-dark.svg b/webapp/static/assets/garm-dark.svg deleted file mode 100644 index f0a0c564..00000000 --- a/webapp/static/assets/garm-dark.svg +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - diff --git a/webapp/static/assets/garm-light.svg b/webapp/static/assets/garm-light.svg deleted file mode 100644 index 2495959d..00000000 --- a/webapp/static/assets/garm-light.svg +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - diff --git a/webapp/static/assets/gitea.svg b/webapp/static/assets/gitea.svg deleted file mode 100644 index e4643ce3..00000000 --- a/webapp/static/assets/gitea.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/webapp/static/assets/github-mark-white.svg b/webapp/static/assets/github-mark-white.svg deleted file mode 100644 index d5e64918..00000000 --- a/webapp/static/assets/github-mark-white.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/webapp/static/assets/github-mark.svg b/webapp/static/assets/github-mark.svg deleted file mode 100644 index 37fa923d..00000000 --- a/webapp/static/assets/github-mark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/webapp/static/favicon-dark.png b/webapp/static/favicon-dark.png deleted file mode 100644 index d16186d1..00000000 Binary files 
a/webapp/static/favicon-dark.png and /dev/null differ diff --git a/webapp/static/favicon-light.png b/webapp/static/favicon-light.png deleted file mode 100644 index 5390c2f9..00000000 Binary files a/webapp/static/favicon-light.png and /dev/null differ diff --git a/webapp/svelte.config.js b/webapp/svelte.config.js deleted file mode 100644 index 08ce32ed..00000000 --- a/webapp/svelte.config.js +++ /dev/null @@ -1,25 +0,0 @@ -import adapter from '@sveltejs/adapter-static'; -import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'; - -// Use different base paths for development vs production -const isDev = process.env.NODE_ENV === 'development'; -const basePath = isDev ? '' : '/ui'; - -/** @type {import('@sveltejs/kit').Config} */ -const config = { - preprocess: vitePreprocess(), - kit: { - adapter: adapter({ - pages: 'build', - assets: 'build', - fallback: 'index.html', - precompress: false, - strict: true - }), - paths: { - base: basePath - } - } -}; - -export default config; diff --git a/webapp/swagger.yaml b/webapp/swagger.yaml deleted file mode 100644 index 5cba229e..00000000 --- a/webapp/swagger.yaml +++ /dev/null @@ -1,3469 +0,0 @@ -basePath: /api/v1 -consumes: - - application/json -definitions: - APIErrorResponse: - properties: - details: - type: string - x-go-name: Details - error: - type: string - x-go-name: Error - type: object - x-go-package: github.com/cloudbase/garm/apiserver/params - Address: - properties: - address: - type: string - x-go-name: Address - type: - $ref: '#/definitions/AddressType' - type: object - x-go-package: github.com/cloudbase/garm-provider-common/params - AddressType: - type: string - x-go-package: github.com/cloudbase/garm-provider-common/params - ControllerInfo: - properties: - callback_url: - description: |- - CallbackURL is the URL where instances can send updates back to the controller. - This URL is used by instances to send status updates back to the controller. The - URL itself may be made available to instances via a reverse proxy or a load balancer. - That means that the user is responsible for telling GARM what the public URL is, by - setting this field. - type: string - x-go-name: CallbackURL - controller_id: - description: |- - ControllerID is the unique ID of this controller. This ID gets generated - automatically on controller init. - format: uuid - type: string - x-go-name: ControllerID - controller_webhook_url: - description: |- - ControllerWebhookURL is the controller-specific URL where webhooks will be received. - This field holds the WebhookURL defined above to which we append the ControllerID. - Functionally it is the same as WebhookURL, but it allows us to safely manage webhooks - from GARM without accidentally removing webhooks from other services or GARM controllers. - type: string - x-go-name: ControllerWebhookURL - hostname: - description: |- - Hostname is the hostname of the machine that runs this controller. In the - future, this field will be migrated to a separate table that will keep track - of each of the controller nodes that are part of a cluster. This will happen when - we implement controller scale-out capability. - type: string - x-go-name: Hostname - metadata_url: - description: |- - MetadataURL is the public metadata URL of the GARM instance. This URL is used - by instances to fetch information they need to set themselves up. The URL itself - may be made available to runners via a reverse proxy or a load balancer. 
That - means that the user is responsible for telling GARM what the public URL is, by - setting this field. - type: string - x-go-name: MetadataURL - minimum_job_age_backoff: - description: |- - MinimumJobAgeBackoff is the minimum time in seconds that a job must be in queued state - before GARM will attempt to allocate a runner for it. When set to a non-zero value, - GARM will ignore the job until the job's age is greater than this value. When using - the min_idle_runners feature of a pool, this gives enough time for potential idle - runners to pick up the job before GARM attempts to allocate a new runner, thus avoiding - the need to potentially scale down runners later. - format: uint64 - type: integer - x-go-name: MinimumJobAgeBackoff - version: - description: Version is the version of the GARM controller. - type: string - x-go-name: Version - webhook_url: - description: |- - WebhookURL is the base URL where the controller will receive webhooks from github. - When webhook management is used, this URL is used as a base to which the controller - UUID is appended and which will receive the webhooks. - The URL itself may be made available to instances via a reverse proxy or a load balancer. - That means that the user is responsible for telling GARM what the public URL is, by - setting this field. - type: string - x-go-name: WebhookURL - type: object - x-go-package: github.com/cloudbase/garm/params - CreateEnterpriseParams: - properties: - credentials_name: - type: string - x-go-name: CredentialsName - name: - type: string - x-go-name: Name - pool_balancer_type: - $ref: '#/definitions/PoolBalancerType' - webhook_secret: - type: string - x-go-name: WebhookSecret - type: object - x-go-package: github.com/cloudbase/garm/params - CreateGiteaCredentialsParams: - properties: - app: - $ref: '#/definitions/GithubApp' - auth_type: - $ref: '#/definitions/ForgeAuthType' - description: - type: string - x-go-name: Description - endpoint: - type: string - x-go-name: Endpoint - name: - type: string - x-go-name: Name - pat: - $ref: '#/definitions/GithubPAT' - type: object - x-go-package: github.com/cloudbase/garm/params - CreateGiteaEndpointParams: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - base_url: - type: string - x-go-name: BaseURL - ca_cert_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CACertBundle - description: - type: string - x-go-name: Description - name: - type: string - x-go-name: Name - type: object - x-go-package: github.com/cloudbase/garm/params - CreateGithubCredentialsParams: - properties: - app: - $ref: '#/definitions/GithubApp' - auth_type: - $ref: '#/definitions/ForgeAuthType' - description: - type: string - x-go-name: Description - endpoint: - type: string - x-go-name: Endpoint - name: - type: string - x-go-name: Name - pat: - $ref: '#/definitions/GithubPAT' - type: object - x-go-package: github.com/cloudbase/garm/params - CreateGithubEndpointParams: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - base_url: - type: string - x-go-name: BaseURL - ca_cert_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CACertBundle - description: - type: string - x-go-name: Description - name: - type: string - x-go-name: Name - upload_base_url: - type: string - x-go-name: UploadBaseURL - type: object - x-go-package: github.com/cloudbase/garm/params - CreateOrgParams: - properties: - credentials_name: - type: string - x-go-name: CredentialsName - forge_type: - $ref: '#/definitions/EndpointType' - 
name: - type: string - x-go-name: Name - pool_balancer_type: - $ref: '#/definitions/PoolBalancerType' - webhook_secret: - type: string - x-go-name: WebhookSecret - type: object - x-go-package: github.com/cloudbase/garm/params - CreatePoolParams: - properties: - enabled: - type: boolean - x-go-name: Enabled - extra_specs: - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - github-runner-group: - description: |- - GithubRunnerGroup is the github runner group to which the runners of this - pool will be added. - The runner group must be created by someone with access to the enterprise. - type: string - x-go-name: GitHubRunnerGroup - image: - type: string - x-go-name: Image - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - priority: - format: uint64 - type: integer - x-go-name: Priority - provider_name: - type: string - x-go-name: ProviderName - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_prefix: - type: string - x-go-name: Prefix - tags: - items: - type: string - type: array - x-go-name: Tags - type: object - x-go-package: github.com/cloudbase/garm/params - CreateRepoParams: - properties: - credentials_name: - type: string - x-go-name: CredentialsName - forge_type: - $ref: '#/definitions/EndpointType' - name: - type: string - x-go-name: Name - owner: - type: string - x-go-name: Owner - pool_balancer_type: - $ref: '#/definitions/PoolBalancerType' - webhook_secret: - type: string - x-go-name: WebhookSecret - type: object - x-go-package: github.com/cloudbase/garm/params - CreateScaleSetParams: - properties: - disable_update: - type: boolean - x-go-name: DisableUpdate - enabled: - type: boolean - x-go-name: Enabled - extra_specs: - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - github-runner-group: - description: |- - GithubRunnerGroup is the github runner group to which the runners of this - pool will be added. - The runner group must be created by someone with access to the enterprise. 
- type: string - x-go-name: GitHubRunnerGroup - image: - type: string - x-go-name: Image - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - name: - type: string - x-go-name: Name - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - provider_name: - type: string - x-go-name: ProviderName - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_prefix: - type: string - x-go-name: Prefix - scale_set_id: - format: int64 - type: integer - x-go-name: ScaleSetID - tags: - items: - type: string - type: array - x-go-name: Tags - type: object - x-go-package: github.com/cloudbase/garm/params - Credentials: - description: used by swagger client generated code - items: - $ref: '#/definitions/ForgeCredentials' - type: array - x-go-package: github.com/cloudbase/garm/params - EndpointType: - type: string - x-go-package: github.com/cloudbase/garm/params - Enterprise: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - credentials: - $ref: '#/definitions/ForgeCredentials' - credentials_id: - format: uint64 - type: integer - x-go-name: CredentialsID - credentials_name: - description: |- - CredentialName is the name of the credentials associated with the enterprise. - This field is now deprecated. Use CredentialsID instead. This field will be - removed in v0.2.0. - type: string - x-go-name: CredentialsName - endpoint: - $ref: '#/definitions/ForgeEndpoint' - events: - items: - $ref: '#/definitions/EntityEvent' - type: array - x-go-name: Events - id: - type: string - x-go-name: ID - name: - type: string - x-go-name: Name - pool: - items: - $ref: '#/definitions/Pool' - type: array - x-go-name: Pools - pool_balancing_type: - $ref: '#/definitions/PoolBalancerType' - pool_manager_status: - $ref: '#/definitions/PoolManagerStatus' - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - Enterprises: - description: used by swagger client generated code - items: - $ref: '#/definitions/Enterprise' - type: array - x-go-package: github.com/cloudbase/garm/params - EntityEvent: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - event_level: - $ref: '#/definitions/EventLevel' - event_type: - $ref: '#/definitions/EventType' - id: - format: uint64 - type: integer - x-go-name: ID - message: - type: string - x-go-name: Message - type: object - x-go-package: github.com/cloudbase/garm/params - EventLevel: - type: string - x-go-package: github.com/cloudbase/garm/params - EventType: - type: string - x-go-package: github.com/cloudbase/garm/params - ForgeAuthType: - type: string - x-go-package: github.com/cloudbase/garm/params - ForgeCredentials: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - auth-type: - $ref: '#/definitions/ForgeAuthType' - base_url: - type: string - x-go-name: BaseURL - ca_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CABundle - created_at: - format: date-time - type: string - x-go-name: CreatedAt - description: - type: string - x-go-name: Description - endpoint: - $ref: '#/definitions/ForgeEndpoint' - enterprises: - items: - $ref: '#/definitions/Enterprise' - type: array - x-go-name: Enterprises - forge_type: - $ref: '#/definitions/EndpointType' - id: - format: uint64 - type: integer - x-go-name: ID - name: - type: string - 
x-go-name: Name - organizations: - items: - $ref: '#/definitions/Organization' - type: array - x-go-name: Organizations - rate_limit: - $ref: '#/definitions/GithubRateLimit' - repositories: - items: - $ref: '#/definitions/Repository' - type: array - x-go-name: Repositories - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - upload_base_url: - type: string - x-go-name: UploadBaseURL - type: object - x-go-package: github.com/cloudbase/garm/params - ForgeEndpoint: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - base_url: - type: string - x-go-name: BaseURL - ca_cert_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CACertBundle - created_at: - format: date-time - type: string - x-go-name: CreatedAt - description: - type: string - x-go-name: Description - endpoint_type: - $ref: '#/definitions/EndpointType' - name: - type: string - x-go-name: Name - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - upload_base_url: - type: string - x-go-name: UploadBaseURL - type: object - x-go-package: github.com/cloudbase/garm/params - ForgeEndpoints: - description: used by swagger client generated code - items: - $ref: '#/definitions/ForgeEndpoint' - type: array - x-go-package: github.com/cloudbase/garm/params - ForgeEntity: - properties: - agent_id: - format: int64 - type: integer - x-go-name: AgentID - os_name: - type: string - x-go-name: OSName - os_version: - type: string - x-go-name: OSVersion - type: object - x-go-name: UpdateSystemInfoParams - x-go-package: github.com/cloudbase/garm/params - GithubApp: - properties: - app_id: - format: int64 - type: integer - x-go-name: AppID - installation_id: - format: int64 - type: integer - x-go-name: InstallationID - private_key_bytes: - items: - format: uint8 - type: integer - type: array - x-go-name: PrivateKeyBytes - type: object - x-go-package: github.com/cloudbase/garm/params - GithubPAT: - properties: - oauth2_token: - type: string - x-go-name: OAuth2Token - type: object - x-go-package: github.com/cloudbase/garm/params - GithubRateLimit: - properties: - limit: - format: int64 - type: integer - x-go-name: Limit - remaining: - format: int64 - type: integer - x-go-name: Remaining - reset: - format: int64 - type: integer - x-go-name: Reset - used: - format: int64 - type: integer - x-go-name: Used - type: object - x-go-package: github.com/cloudbase/garm/params - HookInfo: - properties: - active: - type: boolean - x-go-name: Active - events: - items: - type: string - type: array - x-go-name: Events - id: - format: int64 - type: integer - x-go-name: ID - insecure_ssl: - type: boolean - x-go-name: InsecureSSL - url: - type: string - x-go-name: URL - type: object - x-go-package: github.com/cloudbase/garm/params - InstallWebhookParams: - properties: - insecure_ssl: - type: boolean - x-go-name: InsecureSSL - webhook_endpoint_type: - $ref: '#/definitions/WebhookEndpointType' - type: object - x-go-package: github.com/cloudbase/garm/params - Instance: - properties: - addresses: - description: |- - Addresses is a list of IP addresses the provider reports - for this instance. - items: - $ref: '#/definitions/Address' - type: array - x-go-name: Addresses - agent_id: - description: AgentID is the github runner agent ID. - format: int64 - type: integer - x-go-name: AgentID - created_at: - description: CreatedAt is the timestamp of the creation of this runner. 
- format: date-time - type: string - x-go-name: CreatedAt - github-runner-group: - description: |- - GithubRunnerGroup is the github runner group to which the runner belongs. - The runner group must be created by someone with access to the enterprise. - type: string - x-go-name: GitHubRunnerGroup - id: - description: ID is the database ID of this instance. - type: string - x-go-name: ID - job: - $ref: '#/definitions/Job' - name: - description: |- - Name is the name associated with an instance. Depending on - the provider, this may or may not be useful in the context of - the provider, but we can use it internally to identify the - instance. - type: string - x-go-name: Name - os_arch: - $ref: '#/definitions/OSArch' - os_name: - description: 'OSName is the name of the OS. Eg: ubuntu, centos, etc.' - type: string - x-go-name: OSName - os_type: - $ref: '#/definitions/OSType' - os_version: - description: OSVersion is the version of the operating system. - type: string - x-go-name: OSVersion - pool_id: - description: PoolID is the ID of the garm pool to which a runner belongs. - type: string - x-go-name: PoolID - provider_fault: - description: |- - ProviderFault holds any error messages captured from the IaaS provider that is - responsible for managing the lifecycle of the runner. - items: - format: uint8 - type: integer - type: array - x-go-name: ProviderFault - provider_id: - description: |- - ProviderID is the unique ID the provider associated - with the compute instance. We use this to identify the - instance in the provider. - type: string - x-go-name: ProviderID - provider_name: - description: |- - ProviderName is the name of the IaaS where the instance was - created. - type: string - x-go-name: ProviderName - runner_status: - $ref: '#/definitions/RunnerStatus' - scale_set_id: - description: ScaleSetID is the ID of the scale set to which a runner belongs. - format: uint64 - type: integer - x-go-name: ScaleSetID - status: - $ref: '#/definitions/InstanceStatus' - status_messages: - description: |- - StatusMessages is a list of status messages sent back by the runner as it sets itself - up. - items: - $ref: '#/definitions/StatusMessage' - type: array - x-go-name: StatusMessages - updated_at: - description: UpdatedAt is the timestamp of the last update to this runner. - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - InstanceStatus: - type: string - x-go-package: github.com/cloudbase/garm-provider-common/params - Instances: - description: used by swagger client generated code - items: - $ref: '#/definitions/Instance' - type: array - x-go-package: github.com/cloudbase/garm/params - JWTResponse: - description: |- - JWTResponse holds the JWT token returned as a result of a - successful auth - properties: - token: - type: string - x-go-name: Token - type: object - x-go-package: github.com/cloudbase/garm/params - Job: - properties: - action: - description: Action is the specific activity that triggered the event. - type: string - x-go-name: Action - completed_at: - format: date-time - type: string - x-go-name: CompletedAt - conclusion: - description: |- - Conclusion is the outcome of the job. - Possible values: "success", "failure", "neutral", "cancelled", "skipped", - "timed_out", "action_required" - type: string - x-go-name: Conclusion - created_at: - format: date-time - type: string - x-go-name: CreatedAt - enterprise_id: - format: uuid - type: string - x-go-name: EnterpriseID - id: - description: ID is the ID of the job. 
- format: int64 - type: integer - x-go-name: ID - labels: - items: - type: string - type: array - x-go-name: Labels - locked_by: - format: uuid - type: string - x-go-name: LockedBy - name: - description: Name is the name of the job that was triggered. - type: string - x-go-name: Name - org_id: - format: uuid - type: string - x-go-name: OrgID - repo_id: - description: |- The entity that received the hook. Webhooks may be configured on the repo, the org and/or the enterprise. If we only configure a repo to use garm, we'll only ever receive a webhook from the repo. But if we configure the parent org of the repo and the parent enterprise of the org to use garm, a webhook will be sent for each entity type, in response to one workflow event. Thus, we will get 3 webhooks with the same run_id and job id. Record all involved entities in the same job if we have them configured in garm. - format: uuid - type: string - x-go-name: RepoID - repository_name: - description: The repository in which the job was triggered. - type: string - x-go-name: RepositoryName - repository_owner: - type: string - x-go-name: RepositoryOwner - run_id: - description: RunID is the ID of the workflow run. A run may have multiple jobs. - format: int64 - type: integer - x-go-name: RunID - runner_group_id: - format: int64 - type: integer - x-go-name: RunnerGroupID - runner_group_name: - type: string - x-go-name: RunnerGroupName - runner_id: - format: int64 - type: integer - x-go-name: GithubRunnerID - runner_name: - type: string - x-go-name: RunnerName - scaleset_job_id: - description: ScaleSetJobID is the job ID when generated for a scale set. - type: string - x-go-name: ScaleSetJobID - started_at: - format: date-time - type: string - x-go-name: StartedAt - status: - description: |- Status is the phase of the lifecycle that the job is currently in. Possible values: "queued", "in_progress" and "completed". - type: string - x-go-name: Status - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - workflow_job_id: - format: int64 - type: integer - x-go-name: WorkflowJobID - type: object - x-go-package: github.com/cloudbase/garm/params - Jobs: - items: - $ref: '#/definitions/Job' - type: array - x-go-package: github.com/cloudbase/garm/params - NewUserParams: - description: |- NewUserParams holds the needed information to create a new user - properties: - email: - type: string - x-go-name: Email - full_name: - type: string - x-go-name: FullName - password: - type: string - x-go-name: Password - username: - type: string - x-go-name: Username - type: object - x-go-package: github.com/cloudbase/garm/params - OSArch: - type: string - x-go-package: github.com/cloudbase/garm-provider-common/params - OSType: - type: string - x-go-package: github.com/cloudbase/garm-provider-common/params - Organization: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - credentials: - $ref: '#/definitions/ForgeCredentials' - credentials_id: - format: uint64 - type: integer - x-go-name: CredentialsID - credentials_name: - description: |- CredentialsName is the name of the credentials associated with the organization. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0.
- type: string - x-go-name: CredentialsName - endpoint: - $ref: '#/definitions/ForgeEndpoint' - events: - items: - $ref: '#/definitions/EntityEvent' - type: array - x-go-name: Events - id: - type: string - x-go-name: ID - name: - type: string - x-go-name: Name - pool: - items: - $ref: '#/definitions/Pool' - type: array - x-go-name: Pools - pool_balancing_type: - $ref: '#/definitions/PoolBalancerType' - pool_manager_status: - $ref: '#/definitions/PoolManagerStatus' - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - Organizations: - description: used by swagger client generated code - items: - $ref: '#/definitions/Organization' - type: array - x-go-package: github.com/cloudbase/garm/params - PasswordLoginParams: - properties: - password: - type: string - x-go-name: Password - username: - type: string - x-go-name: Username - type: object - x-go-package: github.com/cloudbase/garm/params - Pool: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - enabled: - type: boolean - x-go-name: Enabled - endpoint: - $ref: '#/definitions/ForgeEndpoint' - enterprise_id: - type: string - x-go-name: EnterpriseID - enterprise_name: - type: string - x-go-name: EnterpriseName - extra_specs: - description: |- ExtraSpecs is opaque raw JSON that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field mean nothing to garm itself. We don't act on the information in this field at all. We only validate that it's valid JSON. - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - github-runner-group: - description: |- GithubRunnerGroup is the github runner group to which the runners will be added. The runner group must be created by someone with access to the enterprise. - type: string - x-go-name: GitHubRunnerGroup - id: - type: string - x-go-name: ID - image: - type: string - x-go-name: Image - instances: - items: - $ref: '#/definitions/Instance' - type: array - x-go-name: Instances - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - org_id: - type: string - x-go-name: OrgID - org_name: - type: string - x-go-name: OrgName - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - priority: - description: |- Priority is the priority of the pool. The higher the number, the higher the priority. When fetching matching pools for a set of tags, the result will be sorted in descending order of priority.
- format: uint64 - type: integer - x-go-name: Priority - provider_name: - type: string - x-go-name: ProviderName - repo_id: - type: string - x-go-name: RepoID - repo_name: - type: string - x-go-name: RepoName - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_prefix: - type: string - x-go-name: Prefix - tags: - items: - $ref: '#/definitions/Tag' - type: array - x-go-name: Tags - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - PoolBalancerType: - type: string - x-go-package: github.com/cloudbase/garm/params - PoolManagerStatus: - properties: - failure_reason: - type: string - x-go-name: FailureReason - running: - type: boolean - x-go-name: IsRunning - type: object - x-go-package: github.com/cloudbase/garm/params - Pools: - description: used by swagger client generated code - items: - $ref: '#/definitions/Pool' - type: array - x-go-package: github.com/cloudbase/garm/params - Provider: - properties: - description: - type: string - x-go-name: Description - name: - type: string - x-go-name: Name - type: - $ref: '#/definitions/ProviderType' - type: object - x-go-package: github.com/cloudbase/garm/params - ProviderType: - type: string - x-go-package: github.com/cloudbase/garm/params - Providers: - description: used by swagger client generated code - items: - $ref: '#/definitions/Provider' - type: array - x-go-package: github.com/cloudbase/garm/params - Repositories: - description: used by swagger client generated code - items: - $ref: '#/definitions/Repository' - type: array - x-go-package: github.com/cloudbase/garm/params - Repository: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - credentials: - $ref: '#/definitions/ForgeCredentials' - credentials_id: - format: uint64 - type: integer - x-go-name: CredentialsID - credentials_name: - description: |- CredentialsName is the name of the credentials associated with the repository. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0.
- type: string - x-go-name: CredentialsName - endpoint: - $ref: '#/definitions/ForgeEndpoint' - events: - items: - $ref: '#/definitions/EntityEvent' - type: array - x-go-name: Events - id: - type: string - x-go-name: ID - name: - type: string - x-go-name: Name - owner: - type: string - x-go-name: Owner - pool: - items: - $ref: '#/definitions/Pool' - type: array - x-go-name: Pools - pool_balancing_type: - $ref: '#/definitions/PoolBalancerType' - pool_manager_status: - $ref: '#/definitions/PoolManagerStatus' - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - RunnerPrefix: - properties: - runner_prefix: - type: string - x-go-name: Prefix - type: object - x-go-package: github.com/cloudbase/garm/params - RunnerStatus: - type: string - x-go-package: github.com/cloudbase/garm/params - ScaleSet: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - desired_runner_count: - format: int64 - type: integer - x-go-name: DesiredRunnerCount - disable_update: - type: boolean - x-go-name: DisableUpdate - enabled: - type: boolean - x-go-name: Enabled - endpoint: - $ref: '#/definitions/ForgeEndpoint' - enterprise_id: - type: string - x-go-name: EnterpriseID - enterprise_name: - type: string - x-go-name: EnterpriseName - extended_state: - type: string - x-go-name: ExtendedState - extra_specs: - description: |- ExtraSpecs is opaque raw JSON that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field mean nothing to garm itself. We don't act on the information in this field at all. We only validate that it's valid JSON. - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - github-runner-group: - description: |- GithubRunnerGroup is the github runner group to which the runners will be added. The runner group must be created by someone with access to the enterprise.
- type: string - x-go-name: GitHubRunnerGroup - id: - format: uint64 - type: integer - x-go-name: ID - image: - type: string - x-go-name: Image - instances: - items: - $ref: '#/definitions/Instance' - type: array - x-go-name: Instances - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - name: - type: string - x-go-name: Name - org_id: - type: string - x-go-name: OrgID - org_name: - type: string - x-go-name: OrgName - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - provider_name: - type: string - x-go-name: ProviderName - repo_id: - type: string - x-go-name: RepoID - repo_name: - type: string - x-go-name: RepoName - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_prefix: - type: string - x-go-name: Prefix - scale_set_id: - format: int64 - type: integer - x-go-name: ScaleSetID - state: - $ref: '#/definitions/ScaleSetState' - status_messages: - items: - $ref: '#/definitions/StatusMessage' - type: array - x-go-name: StatusMessages - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - type: object - x-go-package: github.com/cloudbase/garm/params - ScaleSetState: - type: string - x-go-package: github.com/cloudbase/garm/params - ScaleSets: - description: used by swagger client generated code - items: - $ref: '#/definitions/ScaleSet' - type: array - x-go-package: github.com/cloudbase/garm/params - StatusMessage: - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - event_level: - $ref: '#/definitions/EventLevel' - event_type: - $ref: '#/definitions/EventType' - message: - type: string - x-go-name: Message - type: object - x-go-package: github.com/cloudbase/garm/params - Tag: - properties: - id: - type: string - x-go-name: ID - name: - type: string - x-go-name: Name - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateControllerParams: - properties: - callback_url: - type: string - x-go-name: CallbackURL - metadata_url: - type: string - x-go-name: MetadataURL - minimum_job_age_backoff: - format: uint64 - type: integer - x-go-name: MinimumJobAgeBackoff - webhook_url: - type: string - x-go-name: WebhookURL - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateEntityParams: - properties: - credentials_name: - type: string - x-go-name: CredentialsName - pool_balancer_type: - $ref: '#/definitions/PoolBalancerType' - webhook_secret: - type: string - x-go-name: WebhookSecret - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateGiteaCredentialsParams: - properties: - description: - type: string - x-go-name: Description - name: - type: string - x-go-name: Name - pat: - $ref: '#/definitions/GithubPAT' - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateGiteaEndpointParams: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - base_url: - type: string - x-go-name: BaseURL - ca_cert_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CACertBundle - description: - type: string - x-go-name: Description - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateGithubCredentialsParams: - properties: - app: - $ref: '#/definitions/GithubApp' - description: - type: string - x-go-name: Description - name: - type: string - x-go-name: Name - pat: - $ref: '#/definitions/GithubPAT' - type: object - x-go-package: github.com/cloudbase/garm/params - 
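An aside on the Pool definition removed above: the spec documents that `priority` is numeric, that a higher number means a higher priority, and that matching pools for a set of tags are returned in descending order of priority. A minimal Go sketch of that ordering rule follows; the struct and function names are illustrative only and do not come from GARM's actual implementation.

```go
package main

import (
	"fmt"
	"sort"
)

// Pool mirrors the relevant fields of the swagger Pool definition:
// a numeric priority, where a higher number means a higher priority.
type Pool struct {
	ID       string
	Priority uint64
}

// sortByPriority orders matching pools in descending order of
// priority, as the Pool.priority description above specifies.
func sortByPriority(pools []Pool) {
	sort.Slice(pools, func(i, j int) bool {
		return pools[i].Priority > pools[j].Priority
	})
}

func main() {
	pools := []Pool{{ID: "a", Priority: 1}, {ID: "b", Priority: 10}}
	sortByPriority(pools)
	fmt.Println(pools[0].ID) // prints "b": the highest-priority pool wins
}
```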
UpdateGithubEndpointParams: - properties: - api_base_url: - type: string - x-go-name: APIBaseURL - base_url: - type: string - x-go-name: BaseURL - ca_cert_bundle: - items: - format: uint8 - type: integer - type: array - x-go-name: CACertBundle - description: - type: string - x-go-name: Description - upload_base_url: - type: string - x-go-name: UploadBaseURL - type: object - x-go-package: github.com/cloudbase/garm/params - UpdatePoolParams: - properties: - enabled: - type: boolean - x-go-name: Enabled - extra_specs: - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - github-runner-group: - description: |- GithubRunnerGroup is the github runner group to which the runners of this pool will be added. The runner group must be created by someone with access to the enterprise. - type: string - x-go-name: GitHubRunnerGroup - image: - type: string - x-go-name: Image - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - priority: - format: uint64 - type: integer - x-go-name: Priority - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_prefix: - type: string - x-go-name: Prefix - tags: - items: - type: string - type: array - x-go-name: Tags - type: object - x-go-package: github.com/cloudbase/garm/params - UpdateScaleSetParams: - properties: - enabled: - type: boolean - x-go-name: Enabled - extended_state: - type: string - x-go-name: ExtendedState - extra_specs: - type: object - x-go-name: ExtraSpecs - flavor: - type: string - x-go-name: Flavor - image: - type: string - x-go-name: Image - max_runners: - format: uint64 - type: integer - x-go-name: MaxRunners - min_idle_runners: - format: uint64 - type: integer - x-go-name: MinIdleRunners - name: - type: string - x-go-name: Name - os_arch: - $ref: '#/definitions/OSArch' - os_type: - $ref: '#/definitions/OSType' - runner_bootstrap_timeout: - format: uint64 - type: integer - x-go-name: RunnerBootstrapTimeout - runner_group: - description: |- GithubRunnerGroup is the github runner group to which the runners of this scale set will be added. The runner group must be created by someone with access to the enterprise. - type: string - x-go-name: GitHubRunnerGroup - runner_prefix: - type: string - x-go-name: Prefix - state: - $ref: '#/definitions/ScaleSetState' - type: object - x-go-package: github.com/cloudbase/garm/params - User: - description: User holds information about a particular user - properties: - created_at: - format: date-time - type: string - x-go-name: CreatedAt - email: - type: string - x-go-name: Email - enabled: - type: boolean - x-go-name: Enabled - full_name: - type: string - x-go-name: FullName - id: - type: string - x-go-name: ID - is_admin: - type: boolean - x-go-name: IsAdmin - updated_at: - format: date-time - type: string - x-go-name: UpdatedAt - username: - type: string - x-go-name: Username - type: object - x-go-package: github.com/cloudbase/garm/params - WebhookEndpointType: - type: string - x-go-package: github.com/cloudbase/garm/params -info: - description: The Garm API generated using go-swagger. - license: - name: Apache 2.0 - url: https://www.apache.org/licenses/LICENSE-2.0 - title: Garm API. - version: 1.0.0 -paths: - /auth/login: - post: - operationId: Login - parameters: - - description: Login information.
- in: body - name: Body - required: true - schema: - $ref: '#/definitions/PasswordLoginParams' - description: Login information. - type: object - responses: - "200": - description: JWTResponse - schema: - $ref: '#/definitions/JWTResponse' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Logs in a user and returns a JWT token. - tags: - - login - /controller: - put: - operationId: UpdateController - parameters: - - description: Parameters used when updating the controller. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateControllerParams' - description: Parameters used when updating the controller. - type: object - responses: - "200": - description: ControllerInfo - schema: - $ref: '#/definitions/ControllerInfo' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update controller. - tags: - - controller - /controller-info: - get: - operationId: ControllerInfo - responses: - "200": - description: ControllerInfo - schema: - $ref: '#/definitions/ControllerInfo' - "409": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get controller info. - tags: - - controllerInfo - /enterprises: - get: - operationId: ListEnterprises - parameters: - - description: Exact enterprise name to filter by - in: query - name: name - type: string - - description: Exact endpoint name to filter by - in: query - name: endpoint - type: string - responses: - "200": - description: Enterprises - schema: - $ref: '#/definitions/Enterprises' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all enterprises. - tags: - - enterprises - post: - operationId: CreateEnterprise - parameters: - - description: Parameters used to create the enterprise. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateEnterpriseParams' - description: Parameters used to create the enterprise. - type: object - responses: - "200": - description: Enterprise - schema: - $ref: '#/definitions/Enterprise' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create enterprise with the given parameters. - tags: - - enterprises - /enterprises/{enterpriseID}: - delete: - operationId: DeleteEnterprise - parameters: - - description: ID of the enterprise to delete. - in: path - name: enterpriseID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete enterprise by ID. - tags: - - enterprises - get: - operationId: GetEnterprise - parameters: - - description: The ID of the enterprise to fetch. - in: path - name: enterpriseID - required: true - type: string - responses: - "200": - description: Enterprise - schema: - $ref: '#/definitions/Enterprise' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get enterprise by ID. - tags: - - enterprises - put: - operationId: UpdateEnterprise - parameters: - - description: The ID of the enterprise to update. - in: path - name: enterpriseID - required: true - type: string - - description: Parameters used when updating the enterprise. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateEntityParams' - description: Parameters used when updating the enterprise. 
- type: object - responses: - "200": - description: Enterprise - schema: - $ref: '#/definitions/Enterprise' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update enterprise with the given parameters. - tags: - - enterprises - /enterprises/{enterpriseID}/instances: - get: - operationId: ListEnterpriseInstances - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List enterprise instances. - tags: - - enterprises - - instances - /enterprises/{enterpriseID}/pools: - get: - operationId: ListEnterprisePools - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - responses: - "200": - description: Pools - schema: - $ref: '#/definitions/Pools' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List enterprise pools. - tags: - - enterprises - - pools - post: - operationId: CreateEnterprisePool - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - - description: Parameters used when creating the enterprise pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreatePoolParams' - description: Parameters used when creating the enterprise pool. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create enterprise pool with the parameters given. - tags: - - enterprises - - pools - /enterprises/{enterpriseID}/pools/{poolID}: - delete: - operationId: DeleteEnterprisePool - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - - description: ID of the enterprise pool to delete. - in: path - name: poolID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete enterprise pool by ID. - tags: - - enterprises - - pools - get: - operationId: GetEnterprisePool - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - - description: Pool ID. - in: path - name: poolID - required: true - type: string - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get enterprise pool by ID. - tags: - - enterprises - - pools - put: - operationId: UpdateEnterprisePool - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - - description: ID of the enterprise pool to update. - in: path - name: poolID - required: true - type: string - - description: Parameters used when updating the enterprise pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdatePoolParams' - description: Parameters used when updating the enterprise pool. 
- type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update enterprise pool with the parameters given. - tags: - - enterprises - - pools - /enterprises/{enterpriseID}/scalesets: - get: - operationId: ListEnterpriseScaleSets - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List enterprise scale sets. - tags: - - enterprises - - scalesets - post: - operationId: CreateEnterpriseScaleSet - parameters: - - description: Enterprise ID. - in: path - name: enterpriseID - required: true - type: string - - description: Parameters used when creating the enterprise scale set. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateScaleSetParams' - description: Parameters used when creating the enterprise scale set. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create enterprise scale set with the parameters given. - tags: - - enterprises - - scalesets - /first-run: - post: - operationId: FirstRun - parameters: - - description: Create a new user. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/NewUserParams' - description: Create a new user. - type: object - responses: - "200": - description: User - schema: - $ref: '#/definitions/User' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Initialize the first run of the controller. - tags: - - first-run - /gitea/credentials: - get: - operationId: ListGiteaCredentials - responses: - "200": - description: Credentials - schema: - $ref: '#/definitions/Credentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all credentials. - tags: - - credentials - post: - operationId: CreateGiteaCredentials - parameters: - - description: Parameters used when creating a Gitea credential. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateGiteaCredentialsParams' - description: Parameters used when creating a Gitea credential. - type: object - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create a Gitea credential. - tags: - - credentials - /gitea/credentials/{id}: - delete: - operationId: DeleteGiteaCredentials - parameters: - - description: ID of the Gitea credential. - in: path - name: id - required: true - type: integer - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete a Gitea credential. - tags: - - credentials - get: - operationId: GetGiteaCredentials - parameters: - - description: ID of the Gitea credential. - in: path - name: id - required: true - type: integer - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a Gitea credential.
- tags: - - credentials - put: - operationId: UpdateGiteaCredentials - parameters: - - description: ID of the Gitea credential. - in: path - name: id - required: true - type: integer - - description: Parameters used when updating a Gitea credential. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGiteaCredentialsParams' - description: Parameters used when updating a Gitea credential. - type: object - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a Gitea credential. - tags: - - credentials - /gitea/endpoints: - get: - operationId: ListGiteaEndpoints - responses: - "200": - description: ForgeEndpoints - schema: - $ref: '#/definitions/ForgeEndpoints' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all Gitea Endpoints. - tags: - - endpoints - post: - operationId: CreateGiteaEndpoint - parameters: - - description: Parameters used when creating a Gitea endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateGiteaEndpointParams' - description: Parameters used when creating a Gitea endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create a Gitea Endpoint. - tags: - - endpoints - /gitea/endpoints/{name}: - delete: - operationId: DeleteGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete a Gitea Endpoint. - tags: - - endpoints - get: - operationId: GetGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a Gitea Endpoint. - tags: - - endpoints - put: - operationId: UpdateGiteaEndpoint - parameters: - - description: The name of the Gitea endpoint. - in: path - name: name - required: true - type: string - - description: Parameters used when updating a Gitea endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGiteaEndpointParams' - description: Parameters used when updating a Gitea endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a Gitea Endpoint. - tags: - - endpoints - /github/credentials: - get: - operationId: ListCredentials - responses: - "200": - description: Credentials - schema: - $ref: '#/definitions/Credentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all credentials. - tags: - - credentials - post: - operationId: CreateCredentials - parameters: - - description: Parameters used when creating a GitHub credential. 
- in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateGithubCredentialsParams' - description: Parameters used when creating a GitHub credential. - type: object - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create a GitHub credential. - tags: - - credentials - /github/credentials/{id}: - delete: - operationId: DeleteCredentials - parameters: - - description: ID of the GitHub credential. - in: path - name: id - required: true - type: integer - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete a GitHub credential. - tags: - - credentials - get: - operationId: GetCredentials - parameters: - - description: ID of the GitHub credential. - in: path - name: id - required: true - type: integer - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a GitHub credential. - tags: - - credentials - put: - operationId: UpdateCredentials - parameters: - - description: ID of the GitHub credential. - in: path - name: id - required: true - type: integer - - description: Parameters used when updating a GitHub credential. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGithubCredentialsParams' - description: Parameters used when updating a GitHub credential. - type: object - responses: - "200": - description: ForgeCredentials - schema: - $ref: '#/definitions/ForgeCredentials' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a GitHub credential. - tags: - - credentials - /github/endpoints: - get: - operationId: ListGithubEndpoints - responses: - "200": - description: ForgeEndpoints - schema: - $ref: '#/definitions/ForgeEndpoints' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all GitHub Endpoints. - tags: - - endpoints - post: - operationId: CreateGithubEndpoint - parameters: - - description: Parameters used when creating a GitHub endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateGithubEndpointParams' - description: Parameters used when creating a GitHub endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create a GitHub Endpoint. - tags: - - endpoints - /github/endpoints/{name}: - delete: - operationId: DeleteGithubEndpoint - parameters: - - description: The name of the GitHub endpoint. - in: path - name: name - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete a GitHub Endpoint. - tags: - - endpoints - get: - operationId: GetGithubEndpoint - parameters: - - description: The name of the GitHub endpoint. - in: path - name: name - required: true - type: string - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get a GitHub Endpoint. 
- tags: - - endpoints - put: - operationId: UpdateGithubEndpoint - parameters: - - description: The name of the GitHub endpoint. - in: path - name: name - required: true - type: string - - description: Parameters used when updating a GitHub endpoint. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateGithubEndpointParams' - description: Parameters used when updating a GitHub endpoint. - type: object - responses: - "200": - description: ForgeEndpoint - schema: - $ref: '#/definitions/ForgeEndpoint' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update a GitHub Endpoint. - tags: - - endpoints - /instances: - get: - operationId: ListInstances - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get all runners' instances. - tags: - - instances - /instances/{instanceName}: - delete: - operationId: DeleteInstance - parameters: - - description: Runner instance name. - in: path - name: instanceName - required: true - type: string - - description: If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. - in: query - name: forceRemove - type: boolean - - description: If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. - in: query - name: bypassGHUnauthorized - type: boolean - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete runner instance by name. - tags: - - instances - get: - operationId: GetInstance - parameters: - - description: Runner instance name. - in: path - name: instanceName - required: true - type: string - responses: - "200": - description: Instance - schema: - $ref: '#/definitions/Instance' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get runner instance by name. - tags: - - instances - /jobs: - get: - operationId: ListJobs - responses: - "200": - description: Jobs - schema: - $ref: '#/definitions/Jobs' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all jobs. - tags: - - jobs - /metrics-token: - get: - operationId: GetMetricsToken - responses: - "200": - description: JWTResponse - schema: - $ref: '#/definitions/JWTResponse' - "401": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Returns a JWT token that can be used to access the metrics endpoint. - tags: - - metrics-token - /organizations: - get: - operationId: ListOrgs - parameters: - - description: Exact organization name to filter by - in: query - name: name - type: string - - description: Exact endpoint name to filter by - in: query - name: endpoint - type: string - responses: - "200": - description: Organizations - schema: - $ref: '#/definitions/Organizations' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List organizations. - tags: - - organizations - post: - operationId: CreateOrg - parameters: - - description: Parameters used when creating the organization. 
- in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateOrgParams' - description: Parameters used when creating the organization. - type: object - responses: - "200": - description: Organization - schema: - $ref: '#/definitions/Organization' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create organization with the parameters given. - tags: - - organizations - /organizations/{orgID}: - delete: - operationId: DeleteOrg - parameters: - - description: ID of the organization to delete. - in: path - name: orgID - required: true - type: string - - description: If true and a webhook is installed for this organization, it will not be removed. - in: query - name: keepWebhook - type: boolean - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete organization by ID. - tags: - - organizations - get: - operationId: GetOrg - parameters: - - description: ID of the organization to fetch. - in: path - name: orgID - required: true - type: string - responses: - "200": - description: Organization - schema: - $ref: '#/definitions/Organization' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get organization by ID. - tags: - - organizations - put: - operationId: UpdateOrg - parameters: - - description: ID of the organization to update. - in: path - name: orgID - required: true - type: string - - description: Parameters used when updating the organization. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateEntityParams' - description: Parameters used when updating the organization. - type: object - responses: - "200": - description: Organization - schema: - $ref: '#/definitions/Organization' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update organization with the parameters given. - tags: - - organizations - /organizations/{orgID}/instances: - get: - operationId: ListOrgInstances - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List organization instances. - tags: - - organizations - - instances - /organizations/{orgID}/pools: - get: - operationId: ListOrgPools - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - responses: - "200": - description: Pools - schema: - $ref: '#/definitions/Pools' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List organization pools. - tags: - - organizations - - pools - post: - operationId: CreateOrgPool - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: Parameters used when creating the organization pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreatePoolParams' - description: Parameters used when creating the organization pool. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create organization pool with the parameters given. 
- tags: - - organizations - - pools - /organizations/{orgID}/pools/{poolID}: - delete: - operationId: DeleteOrgPool - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: ID of the organization pool to delete. - in: path - name: poolID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete organization pool by ID. - tags: - - organizations - - pools - get: - operationId: GetOrgPool - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: Pool ID. - in: path - name: poolID - required: true - type: string - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get organization pool by ID. - tags: - - organizations - - pools - put: - operationId: UpdateOrgPool - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: ID of the organization pool to update. - in: path - name: poolID - required: true - type: string - - description: Parameters used when updating the organization pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdatePoolParams' - description: Parameters used when updating the organization pool. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update organization pool with the parameters given. - tags: - - organizations - - pools - /organizations/{orgID}/scalesets: - get: - operationId: ListOrgScaleSets - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List organization scale sets. - tags: - - organizations - - scalesets - post: - operationId: CreateOrgScaleSet - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: Parameters used when creating the organization scale set. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateScaleSetParams' - description: Parameters used when creating the organization scale set. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create organization scale set with the parameters given. - tags: - - organizations - - scalesets - /organizations/{orgID}/webhook: - delete: - operationId: UninstallOrgWebhook - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Uninstall organization webhook. - tags: - - organizations - - hooks - get: - operationId: GetOrgWebhookInfo - parameters: - - description: Organization ID. 
- in: path - name: orgID - required: true - type: string - responses: - "200": - description: HookInfo - schema: - $ref: '#/definitions/HookInfo' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get information about the GARM installed webhook on an organization. - tags: - - organizations - - hooks - post: - description: |- - Install the GARM webhook for an organization. The secret configured on the organization will - be used to validate the requests. - operationId: InstallOrgWebhook - parameters: - - description: Organization ID. - in: path - name: orgID - required: true - type: string - - description: Parameters used when creating the organization webhook. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/InstallWebhookParams' - description: Parameters used when creating the organization webhook. - type: object - responses: - "200": - description: HookInfo - schema: - $ref: '#/definitions/HookInfo' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - tags: - - organizations - - hooks - /pools: - get: - operationId: ListPools - responses: - "200": - description: Pools - schema: - $ref: '#/definitions/Pools' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all pools. - tags: - - pools - /pools/{poolID}: - delete: - operationId: DeletePool - parameters: - - description: ID of the pool to delete. - in: path - name: poolID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete pool by ID. - tags: - - pools - get: - operationId: GetPool - parameters: - - description: ID of the pool to fetch. - in: path - name: poolID - required: true - type: string - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get pool by ID. - tags: - - pools - put: - operationId: UpdatePool - parameters: - - description: ID of the pool to update. - in: path - name: poolID - required: true - type: string - - description: Parameters to update the pool with. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdatePoolParams' - description: Parameters to update the pool with. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update pool by ID. - tags: - - pools - /pools/{poolID}/instances: - get: - operationId: ListPoolInstances - parameters: - - description: Runner pool ID. - in: path - name: poolID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List runner instances in a pool. - tags: - - instances - /providers: - get: - operationId: ListProviders - responses: - "200": - description: Providers - schema: - $ref: '#/definitions/Providers' - "400": - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all providers. 
- tags: - - providers - /repositories: - get: - operationId: ListRepos - parameters: - - description: Exact owner name to filter by - in: query - name: owner - type: string - - description: Exact repository name to filter by - in: query - name: name - type: string - - description: Exact endpoint name to filter by - in: query - name: endpoint - type: string - responses: - "200": - description: Repositories - schema: - $ref: '#/definitions/Repositories' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List repositories. - tags: - - repositories - post: - operationId: CreateRepo - parameters: - - description: Parameters used when creating the repository. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateRepoParams' - description: Parameters used when creating the repository. - type: object - responses: - "200": - description: Repository - schema: - $ref: '#/definitions/Repository' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create repository with the parameters given. - tags: - - repositories - /repositories/{repoID}: - delete: - operationId: DeleteRepo - parameters: - - description: ID of the repository to delete. - in: path - name: repoID - required: true - type: string - - description: If true and a webhook is installed for this repo, it will not be removed. - in: query - name: keepWebhook - type: boolean - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete repository by ID. - tags: - - repositories - get: - operationId: GetRepo - parameters: - - description: ID of the repository to fetch. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: Repository - schema: - $ref: '#/definitions/Repository' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get repository by ID. - tags: - - repositories - put: - operationId: UpdateRepo - parameters: - - description: ID of the repository to update. - in: path - name: repoID - required: true - type: string - - description: Parameters used when updating the repository. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateEntityParams' - description: Parameters used when updating the repository. - type: object - responses: - "200": - description: Repository - schema: - $ref: '#/definitions/Repository' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update repository with the parameters given. - tags: - - repositories - /repositories/{repoID}/instances: - get: - operationId: ListRepoInstances - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List repository instances. - tags: - - repositories - - instances - /repositories/{repoID}/pools: - get: - operationId: ListRepoPools - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: Pools - schema: - $ref: '#/definitions/Pools' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List repository pools. 
- tags: - - repositories - - pools - post: - operationId: CreateRepoPool - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: Parameters used when creating the repository pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreatePoolParams' - description: Parameters used when creating the repository pool. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create repository pool with the parameters given. - tags: - - repositories - - pools - /repositories/{repoID}/pools/{poolID}: - delete: - operationId: DeleteRepoPool - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: ID of the repository pool to delete. - in: path - name: poolID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete repository pool by ID. - tags: - - repositories - - pools - get: - operationId: GetRepoPool - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: Pool ID. - in: path - name: poolID - required: true - type: string - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get repository pool by ID. - tags: - - repositories - - pools - put: - operationId: UpdateRepoPool - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: ID of the repository pool to update. - in: path - name: poolID - required: true - type: string - - description: Parameters used when updating the repository pool. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdatePoolParams' - description: Parameters used when updating the repository pool. - type: object - responses: - "200": - description: Pool - schema: - $ref: '#/definitions/Pool' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update repository pool with the parameters given. - tags: - - repositories - - pools - /repositories/{repoID}/scalesets: - get: - operationId: ListRepoScaleSets - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List repository scale sets. - tags: - - repositories - - scalesets - post: - operationId: CreateRepoScaleSet - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: Parameters used when creating the repository scale set. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/CreateScaleSetParams' - description: Parameters used when creating the repository scale set. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Create repository scale set with the parameters given. 
- tags: - - repositories - - scalesets - /repositories/{repoID}/webhook: - delete: - operationId: UninstallRepoWebhook - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Uninstall repository webhook. - tags: - - repositories - - hooks - get: - operationId: GetRepoWebhookInfo - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - responses: - "200": - description: HookInfo - schema: - $ref: '#/definitions/HookInfo' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get information about the GARM installed webhook on a repository. - tags: - - repositories - - hooks - post: - description: |- Install the GARM webhook for a repository. The secret configured on the repository will be used to validate the requests. - operationId: InstallRepoWebhook - parameters: - - description: Repository ID. - in: path - name: repoID - required: true - type: string - - description: Parameters used when creating the repository webhook. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/InstallWebhookParams' - description: Parameters used when creating the repository webhook. - type: object - responses: - "200": - description: HookInfo - schema: - $ref: '#/definitions/HookInfo' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - tags: - - repositories - - hooks - /scalesets: - get: - operationId: ListScalesets - responses: - "200": - description: ScaleSets - schema: - $ref: '#/definitions/ScaleSets' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List all scalesets. - tags: - - scalesets - /scalesets/{scalesetID}: - delete: - operationId: DeleteScaleSet - parameters: - - description: ID of the scale set to delete. - in: path - name: scalesetID - required: true - type: string - responses: - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Delete scale set by ID. - tags: - - scalesets - get: - operationId: GetScaleSet - parameters: - - description: ID of the scale set to fetch. - in: path - name: scalesetID - required: true - type: string - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Get scale set by ID. - tags: - - scalesets - put: - operationId: UpdateScaleSet - parameters: - - description: ID of the scale set to update. - in: path - name: scalesetID - required: true - type: string - - description: Parameters to update the scale set with. - in: body - name: Body - required: true - schema: - $ref: '#/definitions/UpdateScaleSetParams' - description: Parameters to update the scale set with. - type: object - responses: - "200": - description: ScaleSet - schema: - $ref: '#/definitions/ScaleSet' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: Update scale set by ID. - tags: - - scalesets - /scalesets/{scalesetID}/instances: - get: - operationId: ListScaleSetInstances - parameters: - - description: Runner scale set ID.
- in: path - name: scalesetID - required: true - type: string - responses: - "200": - description: Instances - schema: - $ref: '#/definitions/Instances' - default: - description: APIErrorResponse - schema: - $ref: '#/definitions/APIErrorResponse' - summary: List runner instances in a scale set. - tags: - - instances -produces: - - application/json -security: - - Bearer: [] -securityDefinitions: - Bearer: - description: 'The token with the `Bearer: ` prefix, e.g. "Bearer abcde12345".' - in: header - name: Authorization - type: apiKey -swagger: "2.0" diff --git a/webapp/tailwind.config.js b/webapp/tailwind.config.js deleted file mode 100644 index b8efa028..00000000 --- a/webapp/tailwind.config.js +++ /dev/null @@ -1,34 +0,0 @@ -/** @type {import('tailwindcss').Config} */ -export default { - content: ['./src/**/*.{html,js,svelte,ts}'], - darkMode: 'class', - theme: { - screens: { - 'sm': '640px', - 'md': '768px', - 'lg': '1024px', - 'xl': '1280px', - '2xl': '1536px' - }, - extend: { - colors: { - primary: { - 50: '#eff6ff', - 100: '#dbeafe', - 200: '#bfdbfe', - 300: '#93c5fd', - 400: '#60a5fa', - 500: '#3b82f6', - 600: '#2563eb', - 700: '#1d4ed8', - 800: '#1e40af', - 900: '#1e3a8a' - } - } - } - }, - plugins: [ - import('@tailwindcss/forms'), - import('@tailwindcss/typography') - ] -}; \ No newline at end of file diff --git a/webapp/tsconfig.json b/webapp/tsconfig.json deleted file mode 100644 index cf31bef8..00000000 --- a/webapp/tsconfig.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "extends": "./.svelte-kit/tsconfig.json", - "compilerOptions": { - "allowJs": true, - "checkJs": true, - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "resolveJsonModule": true, - "skipLibCheck": true, - "sourceMap": true, - "strict": true, - "moduleResolution": "bundler" - } -} \ No newline at end of file diff --git a/webapp/vite.config.ts b/webapp/vite.config.ts deleted file mode 100644 index b7cc77ce..00000000 --- a/webapp/vite.config.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { sveltekit } from '@sveltejs/kit/vite'; -import { defineConfig, loadEnv } from 'vite'; - -export default defineConfig(({ mode }) => { - // Load env variables based on the current mode - // Third param '' means load all variables, not just those prefixed with VITE_ - const env = loadEnv(mode, process.cwd(), ''); - - console.log(env.VITE_GARM_API_URL); - return { - plugins: [sveltekit()], - server: { - proxy: { - // Proxy API calls to GARM backend - '/api': { - target: env.VITE_GARM_API_URL, - changeOrigin: true, - ws: true, - configure: (proxy, _options) => { - proxy.on('error', (err, _req, _res) => { - console.log('proxy error', err); - }); - proxy.on('proxyReq', (proxyReq, req, _res) => { - console.log('Sending Request to the Target:', req.method, req.url); - }); - proxy.on('proxyRes', (proxyRes, req, _res) => { - console.log('Received Response from the Target:', proxyRes.statusCode, req.url); - }); - }, - secure: false - } - } - } - }; -}); - diff --git a/webapp/vitest.config.ts b/webapp/vitest.config.ts deleted file mode 100644 index 202bdd51..00000000 --- a/webapp/vitest.config.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { defineConfig } from 'vitest/config'; -import { sveltekit } from '@sveltejs/kit/vite'; - -export default defineConfig({ - plugins: [sveltekit()], - test: { - include: ['src/**/*.{test,spec}.{js,ts}'], - environment: 'jsdom', - setupFiles: ['src/test/setup.ts'], - globals: true, - // Browser mode disabled for now - requires @vitest/browser package - browser: { - enabled: false, - name: 'chromium', 
- provider: 'playwright' - } - }, - // Tell Vitest to use the `browser` entry points in `package.json` files, even though it's running in Node - resolve: process.env.VITEST - ? { - conditions: ['browser'] - } - : undefined -}); \ No newline at end of file diff --git a/websocket/client.go b/websocket/client.go index be3b5bc0..5b80ba81 100644 --- a/websocket/client.go +++ b/websocket/client.go @@ -1,22 +1,7 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package websocket import ( "context" - "errors" "fmt" "log/slog" "net" @@ -25,6 +10,7 @@ import ( "github.com/google/uuid" "github.com/gorilla/websocket" + "github.com/pkg/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/database/common" @@ -63,7 +49,7 @@ func NewClient(ctx context.Context, conn *websocket.Conn) (*Client, error) { watcher.WithUserIDFilter(user), ) if err != nil { - return nil, fmt.Errorf("error registering consumer: %w", err) + return nil, errors.Wrap(err, "registering consumer") } return &Client{ id: clientID.String(), @@ -72,6 +58,8 @@ func NewClient(ctx context.Context, conn *websocket.Conn) (*Client, error) { userID: user, passwordGeneration: generation, consumer: consumer, + done: make(chan struct{}), + send: make(chan []byte, 100), }, nil } @@ -128,8 +116,6 @@ func (c *Client) Start() error { defer c.mux.Unlock() c.running = true - c.send = make(chan []byte, 100) - c.done = make(chan struct{}) go c.runWatcher() go c.clientReader() @@ -143,18 +129,20 @@ func (c *Client) Write(msg []byte) (int, error) { defer c.mux.Unlock() if !c.running { - return 0, fmt.Errorf("websocket client is stopped") + return 0, fmt.Errorf("client is stopped") } tmp := make([]byte, len(msg)) copy(tmp, msg) + timer := time.NewTimer(5 * time.Second) + defer timer.Stop() select { + case <-timer.C: + return 0, fmt.Errorf("timed out sending message to client") case c.send <- tmp: - return len(tmp), nil - default: - return 0, fmt.Errorf("timed out sending message to websocket client") } + return len(tmp), nil } // clientReader waits for options changes from the client. The client can at any time diff --git a/websocket/websocket.go b/websocket/websocket.go index d1f626e6..57820449 100644 --- a/websocket/websocket.go +++ b/websocket/websocket.go @@ -1,16 +1,3 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. package websocket import ( @@ -41,15 +28,14 @@ type Hub struct { // Inbound messages from the clients. 
broadcast chan []byte - mux sync.Mutex - running bool - once sync.Once + mux sync.Mutex + once sync.Once } func (h *Hub) run() { - defer close(h.closed) - defer h.Stop() - + defer func() { + close(h.closed) + }() for { select { case <-h.quit: @@ -73,7 +59,8 @@ func (h *Hub) run() { for _, id := range staleClients { if client, ok := h.clients[id]; ok { if client != nil { - client.Stop() + client.conn.Close() + close(client.send) } delete(h.clients, id) } @@ -120,27 +107,17 @@ func (h *Hub) Unregister(client *Client) error { func (h *Hub) Write(msg []byte) (int, error) { tmp := make([]byte, len(msg)) copy(tmp, msg) - + timer := time.NewTimer(5 * time.Second) + defer timer.Stop() select { + case <-timer.C: + return 0, fmt.Errorf("timed out sending message to client") case h.broadcast <- tmp: - return len(tmp), nil - case <-h.quit: - return 0, fmt.Errorf("websocket hub is shutting down") - default: - return 0, fmt.Errorf("failed to broadcast over websocket") } + return len(tmp), nil } func (h *Hub) Start() error { - h.mux.Lock() - defer h.mux.Unlock() - - if h.running { - return nil - } - - h.running = true - go h.run() return nil } @@ -153,22 +130,11 @@ func (h *Hub) Close() error { } func (h *Hub) Stop() error { - h.mux.Lock() - defer h.mux.Unlock() - - if !h.running { - return nil - } - - h.running = false h.Close() return h.Wait() } func (h *Hub) Wait() error { - if !h.running { - return nil - } timer := time.NewTimer(60 * time.Second) defer timer.Stop() select { diff --git a/workers/cache/cache.go b/workers/cache/cache.go deleted file mode 100644 index 3b387f50..00000000 --- a/workers/cache/cache.go +++ /dev/null @@ -1,503 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
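(The hunks above in websocket/client.go and websocket/websocket.go converge on the same timer-guarded send: instead of failing immediately when the buffered channel is full, the write races against a 5-second deadline. A minimal, self-contained sketch of the pattern, assuming a plain buffered []byte channel in place of GARM's actual Client and Hub types:)

package main

import (
	"fmt"
	"time"
)

// sendWithTimeout copies the payload so the caller may reuse its buffer,
// then races the channel send against a deadline, mirroring the new
// Client.Write and Hub.Write bodies above.
func sendWithTimeout(ch chan<- []byte, msg []byte, d time.Duration) (int, error) {
	tmp := make([]byte, len(msg))
	copy(tmp, msg)

	timer := time.NewTimer(d)
	defer timer.Stop()
	select {
	case ch <- tmp:
		return len(tmp), nil
	case <-timer.C:
		return 0, fmt.Errorf("timed out sending message to client")
	}
}

func main() {
	ch := make(chan []byte, 1)
	n, err := sendWithTimeout(ch, []byte("hello"), 5*time.Second)
	fmt.Println(n, err) // 5 <nil>
}

(The trade-off versus the removed select-with-default is that a slow consumer now stalls the producer for up to the full timeout instead of being dropped at once.)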
-package cache - -import ( - "context" - "fmt" - "log/slog" - "sync" - "time" - - "golang.org/x/sync/errgroup" - - "github.com/cloudbase/garm/cache" - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/params" - garmUtil "github.com/cloudbase/garm/util" - "github.com/cloudbase/garm/util/github" -) - -func NewWorker(ctx context.Context, store common.Store) *Worker { - consumerID := "cache" - ctx = garmUtil.WithSlogContext( - ctx, - slog.Any("worker", consumerID)) - - return &Worker{ - ctx: ctx, - store: store, - consumerID: consumerID, - toolsWorkes: make(map[string]*toolsUpdater), - quit: make(chan struct{}), - } -} - -type Worker struct { - ctx context.Context - consumerID string - - consumer common.Consumer - store common.Store - toolsWorkes map[string]*toolsUpdater - - mux sync.Mutex - running bool - quit chan struct{} -} - -func (w *Worker) setCacheForEntity(entityGetter params.EntityGetter, pools []params.Pool, scaleSets []params.ScaleSet) error { - entity, err := entityGetter.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - cache.SetEntity(entity) - var entityPools []params.Pool - var entityScaleSets []params.ScaleSet - - for _, pool := range pools { - if pool.BelongsTo(entity) { - entityPools = append(entityPools, pool) - } - } - - for _, scaleSet := range scaleSets { - if scaleSet.BelongsTo(entity) { - entityScaleSets = append(entityScaleSets, scaleSet) - } - } - - cache.ReplaceEntityPools(entity.ID, entityPools) - cache.ReplaceEntityScaleSets(entity.ID, entityScaleSets) - - return nil -} - -func (w *Worker) loadAllEntities() error { - pools, err := w.store.ListAllPools(w.ctx) - if err != nil { - return fmt.Errorf("listing pools: %w", err) - } - - scaleSets, err := w.store.ListAllScaleSets(w.ctx) - if err != nil { - return fmt.Errorf("listing scale sets: %w", err) - } - - repos, err := w.store.ListRepositories(w.ctx, params.RepositoryFilter{}) - if err != nil { - return fmt.Errorf("listing repositories: %w", err) - } - - orgs, err := w.store.ListOrganizations(w.ctx, params.OrganizationFilter{}) - if err != nil { - return fmt.Errorf("listing organizations: %w", err) - } - - enterprises, err := w.store.ListEnterprises(w.ctx, params.EnterpriseFilter{}) - if err != nil { - return fmt.Errorf("listing enterprises: %w", err) - } - - for _, repo := range repos { - if err := w.setCacheForEntity(repo, pools, scaleSets); err != nil { - return fmt.Errorf("setting cache for repo: %w", err) - } - } - - for _, org := range orgs { - if err := w.setCacheForEntity(org, pools, scaleSets); err != nil { - return fmt.Errorf("setting cache for org: %w", err) - } - } - - for _, enterprise := range enterprises { - if err := w.setCacheForEntity(enterprise, pools, scaleSets); err != nil { - return fmt.Errorf("setting cache for enterprise: %w", err) - } - } - - for _, entity := range cache.GetAllEntities() { - worker := newToolsUpdater(w.ctx, entity, w.store) - if err := worker.Start(); err != nil { - return fmt.Errorf("starting tools updater: %w", err) - } - w.toolsWorkes[entity.ID] = worker - } - return nil -} - -func (w *Worker) loadAllInstances() error { - instances, err := w.store.ListAllInstances(w.ctx) - if err != nil { - return fmt.Errorf("listing instances: %w", err) - } - - for _, instance := range instances { - cache.SetInstanceCache(instance) - } - return nil -} - -func (w *Worker) loadAllGithubCredentials() error { - creds, err := w.store.ListGithubCredentials(w.ctx) - if err != nil { - return 
fmt.Errorf("listing github credentials: %w", err) - } - - for _, cred := range creds { - cache.SetGithubCredentials(cred) - } - return nil -} - -func (w *Worker) loadAllGiteaCredentials() error { - creds, err := w.store.ListGiteaCredentials(w.ctx) - if err != nil { - return fmt.Errorf("listing gitea credentials: %w", err) - } - - for _, cred := range creds { - cache.SetGiteaCredentials(cred) - } - return nil -} - -func (w *Worker) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { - if g == nil { - return nil - } - - done := make(chan error, 1) - go func() { - waitErr := g.Wait() - done <- waitErr - }() - - select { - case err := <-done: - return err - case <-w.ctx.Done(): - return w.ctx.Err() - case <-w.quit: - return nil - } -} - -func (w *Worker) Start() error { - slog.DebugContext(w.ctx, "starting cache worker") - w.mux.Lock() - defer w.mux.Unlock() - - if w.running { - return nil - } - - g, _ := errgroup.WithContext(w.ctx) - - g.Go(func() error { - if err := w.loadAllGithubCredentials(); err != nil { - return fmt.Errorf("loading all github credentials: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := w.loadAllGiteaCredentials(); err != nil { - return fmt.Errorf("loading all gitea credentials: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := w.loadAllEntities(); err != nil { - return fmt.Errorf("loading all entities: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := w.loadAllInstances(); err != nil { - return fmt.Errorf("loading all instances: %w", err) - } - return nil - }) - - if err := w.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) - } - - consumer, err := watcher.RegisterConsumer( - w.ctx, w.consumerID, - watcher.WithAll()) - if err != nil { - return fmt.Errorf("registering consumer: %w", err) - } - w.consumer = consumer - w.running = true - w.quit = make(chan struct{}) - - go w.loop() - go w.rateLimitLoop() - return nil -} - -func (w *Worker) Stop() error { - slog.DebugContext(w.ctx, "stopping cache worker") - w.mux.Lock() - defer w.mux.Unlock() - - if !w.running { - return nil - } - - for _, worker := range w.toolsWorkes { - if err := worker.Stop(); err != nil { - slog.ErrorContext(w.ctx, "stopping tools updater", "error", err) - } - } - w.consumer.Close() - w.running = false - close(w.quit) - return nil -} - -func (w *Worker) handleEntityEvent(entityGetter params.EntityGetter, op common.OperationType) { - entity, err := entityGetter.GetEntity() - if err != nil { - slog.DebugContext(w.ctx, "getting entity from event", "error", err) - return - } - switch op { - case common.CreateOperation, common.UpdateOperation: - old, hasOld := cache.GetEntity(entity.ID) - cache.SetEntity(entity) - worker, ok := w.toolsWorkes[entity.ID] - if !ok { - worker = newToolsUpdater(w.ctx, entity, w.store) - if err := worker.Start(); err != nil { - slog.ErrorContext(w.ctx, "starting tools updater", "error", err) - return - } - w.toolsWorkes[entity.ID] = worker - } else if hasOld { - // probably an update operation - if old.Credentials.GetID() != entity.Credentials.GetID() { - worker.Reset() - } - } - case common.DeleteOperation: - cache.DeleteEntity(entity.ID) - worker, ok := w.toolsWorkes[entity.ID] - if ok { - if err := worker.Stop(); err != nil { - slog.ErrorContext(w.ctx, "stopping tools updater", "error", err) - } - delete(w.toolsWorkes, entity.ID) - } - } -} - -func (w *Worker) handleRepositoryEvent(event common.ChangePayload) { - repo, ok := 
event.Payload.(params.Repository) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for repository event", "payload", event.Payload) - return - } - w.handleEntityEvent(repo, event.Operation) -} - -func (w *Worker) handleOrgEvent(event common.ChangePayload) { - org, ok := event.Payload.(params.Organization) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for org event", "payload", event.Payload) - return - } - w.handleEntityEvent(org, event.Operation) -} - -func (w *Worker) handleEnterpriseEvent(event common.ChangePayload) { - enterprise, ok := event.Payload.(params.Enterprise) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for enterprise event", "payload", event.Payload) - return - } - w.handleEntityEvent(enterprise, event.Operation) -} - -func (w *Worker) handlePoolEvent(event common.ChangePayload) { - pool, ok := event.Payload.(params.Pool) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for pool event", "payload", event.Payload) - return - } - entity, err := pool.GetEntity() - if err != nil { - slog.DebugContext(w.ctx, "getting entity from pool", "error", err) - return - } - - switch event.Operation { - case common.CreateOperation, common.UpdateOperation: - cache.SetEntityPool(entity.ID, pool) - case common.DeleteOperation: - cache.DeleteEntityPool(entity.ID, pool.ID) - } -} - -func (w *Worker) handleScaleSetEvent(event common.ChangePayload) { - scaleSet, ok := event.Payload.(params.ScaleSet) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for scale set event", "payload", event.Payload) - return - } - entity, err := scaleSet.GetEntity() - if err != nil { - slog.DebugContext(w.ctx, "getting entity from scale set", "error", err) - return - } - - switch event.Operation { - case common.CreateOperation, common.UpdateOperation: - cache.SetEntityScaleSet(entity.ID, scaleSet) - case common.DeleteOperation: - cache.DeleteEntityScaleSet(entity.ID, scaleSet.ID) - } -} - -func (w *Worker) handleInstanceEvent(event common.ChangePayload) { - instance, ok := event.Payload.(params.Instance) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for instance event", "payload", event.Payload) - return - } - switch event.Operation { - case common.CreateOperation, common.UpdateOperation: - cache.SetInstanceCache(instance) - case common.DeleteOperation: - cache.DeleteInstanceCache(instance.Name) - } -} - -func (w *Worker) handleCredentialsEvent(event common.ChangePayload) { - credentials, ok := event.Payload.(params.ForgeCredentials) - if !ok { - slog.DebugContext(w.ctx, "invalid payload type for credentials event", "payload", event.Payload) - return - } - switch event.Operation { - case common.CreateOperation, common.UpdateOperation: - switch credentials.ForgeType { - case params.GithubEndpointType: - cache.SetGithubCredentials(credentials) - case params.GiteaEndpointType: - cache.SetGiteaCredentials(credentials) - default: - slog.DebugContext(w.ctx, "invalid credentials type", "credentials_type", credentials.ForgeType) - return - } - entities := cache.GetEntitiesUsingCredentials(credentials) - for _, entity := range entities { - worker, ok := w.toolsWorkes[entity.ID] - if ok { - worker.Reset() - } - } - case common.DeleteOperation: - cache.DeleteGithubCredentials(credentials.ID) - } -} - -func (w *Worker) handleEvent(event common.ChangePayload) { - slog.DebugContext(w.ctx, "handling event", "event_entity_type", event.EntityType, "event_operation", event.Operation) - switch event.EntityType { - case common.PoolEntityType: - w.handlePoolEvent(event)
- case common.ScaleSetEntityType: - w.handleScaleSetEvent(event) - case common.InstanceEntityType: - w.handleInstanceEvent(event) - case common.RepositoryEntityType: - w.handleRepositoryEvent(event) - case common.OrganizationEntityType: - w.handleOrgEvent(event) - case common.EnterpriseEntityType: - w.handleEnterpriseEvent(event) - case common.GithubCredentialsEntityType, common.GiteaCredentialsEntityType: - w.handleCredentialsEvent(event) - default: - slog.DebugContext(w.ctx, "unknown entity type", "entity_type", event.EntityType) - } -} - -func (w *Worker) loop() { - defer w.Stop() - for { - select { - case <-w.quit: - return - case event, ok := <-w.consumer.Watch(): - if !ok { - slog.InfoContext(w.ctx, "consumer channel closed") - return - } - w.handleEvent(event) - case <-w.ctx.Done(): - slog.DebugContext(w.ctx, "context done") - return - } - } -} - -func (w *Worker) rateLimitLoop() { - defer w.Stop() - - ticker := time.NewTicker(30 * time.Second) - defer ticker.Stop() - - for { - select { - case <-w.quit: - return - case <-w.ctx.Done(): - slog.DebugContext(w.ctx, "context done") - return - case <-ticker.C: - // update credentials rate limits - for _, creds := range cache.GetAllGithubCredentials() { - rateCli, err := github.NewRateLimitClient(w.ctx, creds) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(w.ctx, "failed to create rate limit client") - continue - } - rateLimit, err := rateCli.RateLimit(w.ctx) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext(w.ctx, "failed to get rate limit") - continue - } - if rateLimit != nil { - core := rateLimit.GetCore() - limit := params.GithubRateLimit{ - Limit: core.Limit, - Used: core.Used, - Remaining: core.Remaining, - Reset: core.Reset.Unix(), - } - cache.SetCredentialsRateLimit(creds.ID, limit) - } - } - } - } -} diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go deleted file mode 100644 index 8410a826..00000000 --- a/workers/cache/gitea_tools.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package cache - -import ( - "context" - "encoding/json" - "fmt" - "io" - "log/slog" - "net/http" - "strings" - "time" - - "golang.org/x/mod/semver" - - commonParams "github.com/cloudbase/garm-provider-common/params" -) - -const ( - // GiteaRunnerReleasesURL is the public API URL that returns a json of all Gitea runner releases. - // By default it returns the last 10 releases, which is enough for our needs. - GiteaRunnerReleasesURL = "https://gitea.com/api/v1/repos/gitea/act_runner/releases" - // GiteaRunnerMinimumVersion is the minimum version we need in order to support ephemeral runners. 
- GiteaRunnerMinimumVersion = "v0.2.12" -) - -var githubArchMapping = map[string]string{ - "x86_64": "x64", - "amd64": "x64", - "armv7l": "arm", - "aarch64": "arm64", - "x64": "x64", - "arm": "arm", - "arm64": "arm64", -} - -var nightlyActRunner = GiteaEntityTool{ - TagName: "nightly", - Name: "nightly", - TarballURL: "https://gitea.com/gitea/act_runner/archive/main.tar.gz", - Assets: []GiteaToolsAssets{ - { - Name: "act_runner-nightly-linux-amd64.xz", - DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-linux-amd64.xz", - }, - { - Name: "act_runner-nightly-linux-arm64.xz", - DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-linux-arm64.xz", - }, - { - Name: "act_runner-nightly-windows-amd64.exe.xz", - DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-windows-amd64.exe.xz", - }, - }, -} - -type GiteaToolsAssets struct { - ID uint `json:"id"` - Name string `json:"name"` - Size uint `json:"size"` - DownloadCount uint `json:"download_count"` - CreatedAt time.Time `json:"created_at"` - UUID string `json:"uuid"` - DownloadURL string `json:"browser_download_url"` -} - -func (g GiteaToolsAssets) GetOS() (*string, error) { - if g.Name == "" { - return nil, fmt.Errorf("gitea tools name is empty") - } - - parts := strings.SplitN(g.Name, "-", 4) - if len(parts) != 4 { - return nil, fmt.Errorf("could not parse asset name") - } - - os := parts[2] - return &os, nil -} - -func (g GiteaToolsAssets) GetArch() (*string, error) { - if g.Name == "" { - return nil, fmt.Errorf("gitea tools name is empty") - } - - parts := strings.SplitN(g.Name, "-", 4) - if len(parts) != 4 { - return nil, fmt.Errorf("could not parse asset name") - } - - archParts := strings.SplitN(parts[3], ".", 2) - if len(archParts) == 0 { - return nil, fmt.Errorf("unexpected asset name format") - } - arch := githubArchMapping[archParts[0]] - if arch == "" { - return nil, fmt.Errorf("could not find arch for %s", archParts[0]) - } - return &arch, nil -} - -type GiteaEntityTool struct { - // TagName is the semver version of the release. 
- TagName string `json:"tag_name"` - Name string `json:"name"` - TarballURL string `json:"tarball_url"` - Assets []GiteaToolsAssets `json:"assets"` -} - -type GiteaEntityTools []GiteaEntityTool - -func (g GiteaEntityTools) GetLatestVersion() string { - if len(g) == 0 { - return "" - } - return g[0].TagName -} - -func (g GiteaEntityTools) MinimumVersion() (GiteaEntityTool, bool) { - if len(g) == 0 { - return GiteaEntityTool{}, false - } - for _, tool := range g { - if semver.Compare(tool.TagName, GiteaRunnerMinimumVersion) >= 0 { - return tool, true - } - } - return GiteaEntityTool{}, false -} - -func getTools(ctx context.Context) ([]commonParams.RunnerApplicationDownload, error) { - resp, err := http.Get(GiteaRunnerReleasesURL) - if err != nil { - return nil, err - } - defer resp.Body.Close() - data, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - var tools GiteaEntityTools - err = json.Unmarshal(data, &tools) - if err != nil { - return nil, err - } - - if len(tools) == 0 { - return nil, fmt.Errorf("no tools found") - } - - latest, ok := tools.MinimumVersion() - if !ok { - latest = nightlyActRunner - } - - ret := []commonParams.RunnerApplicationDownload{} - - for _, asset := range latest.Assets { - arch, err := asset.GetArch() - if err != nil { - slog.InfoContext(ctx, "ignoring unrecognized tools arch", "tool", asset.Name) - continue - } - os, err := asset.GetOS() - if err != nil { - slog.InfoContext(ctx, "ignoring unrecognized tools os", "tool", asset.Name) - continue - } - if !strings.HasSuffix(asset.DownloadURL, ".xz") { - // filter out non compressed versions. - continue - } - ret = append(ret, commonParams.RunnerApplicationDownload{ - OS: os, - Architecture: arch, - DownloadURL: &asset.DownloadURL, - Filename: &asset.Name, - }) - } - - return ret, nil -} diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go deleted file mode 100644 index fcf66757..00000000 --- a/workers/cache/tool_cache.go +++ /dev/null @@ -1,270 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
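(The SplitN parsing in GetOS and GetArch above is easiest to follow on a concrete asset name. A hedged restatement with a stripped-down signature; the real methods hang off GiteaToolsAssets and map the raw arch through githubArchMapping, e.g. amd64 to x64:)

package main

import (
	"fmt"
	"strings"
)

// parseAssetName splits a name such as "act_runner-nightly-linux-amd64.xz"
// on "-" into four parts; part 2 is the OS, and the arch is part 3 up to
// the first dot, as GetOS/GetArch above do.
func parseAssetName(name string) (osName, arch string, err error) {
	parts := strings.SplitN(name, "-", 4)
	if len(parts) != 4 {
		return "", "", fmt.Errorf("could not parse asset name %q", name)
	}
	archParts := strings.SplitN(parts[3], ".", 2)
	if len(archParts) == 0 {
		return "", "", fmt.Errorf("unexpected asset name format")
	}
	return parts[2], archParts[0], nil
}

func main() {
	osName, arch, err := parseAssetName("act_runner-nightly-linux-amd64.xz")
	fmt.Println(osName, arch, err) // linux amd64 <nil>
}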
- -package cache - -import ( - "context" - "crypto/rand" - "fmt" - "log/slog" - "math/big" - "sync" - "time" - - commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/cache" - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" - garmUtil "github.com/cloudbase/garm/util" - "github.com/cloudbase/garm/util/github" -) - -func newToolsUpdater(ctx context.Context, entity params.ForgeEntity, store common.Store) *toolsUpdater { - return &toolsUpdater{ - ctx: ctx, - entity: entity, - quit: make(chan struct{}), - store: store, - } -} - -type toolsUpdater struct { - ctx context.Context - - entity params.ForgeEntity - tools []commonParams.RunnerApplicationDownload - lastUpdate time.Time - store common.Store - - mux sync.Mutex - running bool - quit chan struct{} - - reset chan struct{} -} - -func (t *toolsUpdater) Start() error { - t.mux.Lock() - defer t.mux.Unlock() - - if t.running { - return nil - } - - t.running = true - t.quit = make(chan struct{}) - - slog.DebugContext(t.ctx, "starting tools updater", "entity", t.entity.String(), "forge_type", t.entity.Credentials.ForgeType) - - switch t.entity.Credentials.ForgeType { - case params.GithubEndpointType: - go t.loop() - case params.GiteaEndpointType: - go t.giteaUpdateLoop() - } - return nil -} - -func (t *toolsUpdater) Stop() error { - t.mux.Lock() - defer t.mux.Unlock() - - if !t.running { - return nil - } - - t.running = false - close(t.quit) - - return nil -} - -func (t *toolsUpdater) updateTools() error { - slog.DebugContext(t.ctx, "updating tools", "entity", t.entity.String(), "forge_type", t.entity.Credentials.ForgeType) - entity, ok := cache.GetEntity(t.entity.ID) - if !ok { - return fmt.Errorf("getting entity from cache: %s", t.entity.ID) - } - ghCli, err := github.Client(t.ctx, entity) - if err != nil { - return fmt.Errorf("getting github client: %w", err) - } - - tools, err := garmUtil.FetchTools(t.ctx, ghCli) - if err != nil { - return fmt.Errorf("fetching tools: %w", err) - } - t.lastUpdate = time.Now().UTC() - t.tools = tools - - slog.DebugContext(t.ctx, "updating tools cache", "entity", t.entity.String()) - cache.SetGithubToolsCache(entity, tools) - return nil -} - -func (t *toolsUpdater) Reset() { - t.mux.Lock() - defer t.mux.Unlock() - - if !t.running { - return - } - - if t.entity.Credentials.ForgeType == params.GiteaEndpointType { - // no need to reset the gitea tools updater when credentials - // are updated. - return - } - - if t.reset != nil { - close(t.reset) - t.reset = nil - } -} - -func (t *toolsUpdater) sleepWithCancel(sleepTime time.Duration) (canceled bool) { - if sleepTime == 0 { - return false - } - ticker := time.NewTicker(sleepTime) - defer ticker.Stop() - - select { - case <-ticker.C: - return false - case <-t.quit: - case <-t.ctx.Done(): - } - return true -} - -// giteaUpdateLoop updates tools for gitea. The act runner can be downloaded -// without a token, unlike the github tools, which for GHES require a token. -func (t *toolsUpdater) giteaUpdateLoop() { - defer t.Stop() - - // add some jitter. When spinning up multiple entities, we add - // jitter to prevent a stampeding herd.
- randInt, err := rand.Int(rand.Reader, big.NewInt(3000)) - if err != nil { - randInt = big.NewInt(0) - } - t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) - tools, err := getTools(t.ctx) - if err != nil { - t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) - } else { - t.addStatusEvent("successfully updated tools", params.EventInfo) - cache.SetGithubToolsCache(t.entity, tools) - } - - // Once every 3 hours should be enough. Tools don't expire. - ticker := time.NewTicker(3 * time.Hour) - - for { - select { - case <-t.quit: - slog.DebugContext(t.ctx, "stopping tools updater") - return - case <-t.ctx.Done(): - return - case <-ticker.C: - tools, err := getTools(t.ctx) - if err != nil { - t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) - slog.DebugContext(t.ctx, "failed to update gitea tools", "error", err) - continue - } - t.addStatusEvent("successfully updated tools", params.EventInfo) - cache.SetGithubToolsCache(t.entity, tools) - } - } -} - -func (t *toolsUpdater) loop() { - defer t.Stop() - - // add some jitter. When spinning up multiple entities, we add - // jitter to prevent a stampeding herd. - randInt, err := rand.Int(rand.Reader, big.NewInt(3000)) - if err != nil { - randInt = big.NewInt(0) - } - t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) - - var resetTime time.Time - now := time.Now().UTC() - if now.After(t.lastUpdate.Add(40 * time.Minute)) { - if err := t.updateTools(); err != nil { - slog.ErrorContext(t.ctx, "updating tools", "error", err) - t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) - resetTime = now.Add(5 * time.Minute) - } else { - // Tools are usually valid for 1 hour. - resetTime = t.lastUpdate.Add(40 * time.Minute) - t.addStatusEvent("successfully updated tools", params.EventInfo) - } - } - - for { - if t.reset == nil { - t.reset = make(chan struct{}) - } - // add some jitter - randInt, err := rand.Int(rand.Reader, big.NewInt(300)) - if err != nil { - randInt = big.NewInt(0) - } - timer := time.NewTimer(resetTime.Sub(now) + time.Duration(randInt.Int64())*time.Second) - select { - case <-t.quit: - slog.DebugContext(t.ctx, "stopping tools updater") - timer.Stop() - return - case <-timer.C: - slog.DebugContext(t.ctx, "updating tools") - now = time.Now().UTC() - if err := t.updateTools(); err != nil { - slog.ErrorContext(t.ctx, "updating tools", "error", err) - t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) - resetTime = now.Add(5 * time.Minute) - } else { - // Tools are usually valid for 1 hour. - resetTime = t.lastUpdate.Add(40 * time.Minute) - t.addStatusEvent("successfully updated tools", params.EventInfo) - } - case <-t.reset: - slog.DebugContext(t.ctx, "resetting tools updater") - timer.Stop() - now = time.Now().UTC() - if err := t.updateTools(); err != nil { - slog.ErrorContext(t.ctx, "updating tools", "error", err) - t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) - resetTime = now.Add(5 * time.Minute) - } else { - // Tools are usually valid for 1 hour.
- resetTime = t.lastUpdate.Add(40 * time.Minute) - t.addStatusEvent("successfully updated tools", params.EventInfo) - } - } - timer.Stop() - } -} - -func (t *toolsUpdater) addStatusEvent(msg string, level params.EventLevel) { - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, level, msg, 30); err != nil { - slog.With(slog.Any("error", err)).Error("failed to add entity event") - } -} diff --git a/workers/common/interfaces.go b/workers/common/interfaces.go deleted file mode 100644 index a04f16a6..00000000 --- a/workers/common/interfaces.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package common - -import ( - commonParams "github.com/cloudbase/garm-provider-common/params" -) - -type ToolsGetter interface { - GetTools() ([]commonParams.RunnerApplicationDownload, error) -} diff --git a/workers/entity/controller.go b/workers/entity/controller.go deleted file mode 100644 index 3ad52108..00000000 --- a/workers/entity/controller.go +++ /dev/null @@ -1,249 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
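(Both updater loops above open with the same jittered, cancellable sleep to avoid a stampeding herd when many entities start at once. A minimal sketch of that building block, assuming only a quit channel and a context; the 3000ms bound matches the loops above, and a timer stands in for the original's ticker:)

package main

import (
	"context"
	"crypto/rand"
	"fmt"
	"math/big"
	"time"
)

// jitter returns a random duration in [0, bound) milliseconds, falling
// back to zero if the entropy source fails, as the loops above do.
func jitter(bound int64) time.Duration {
	n, err := rand.Int(rand.Reader, big.NewInt(bound))
	if err != nil {
		return 0
	}
	return time.Duration(n.Int64()) * time.Millisecond
}

// sleepWithCancel reports whether the sleep was interrupted by the quit
// channel or the context before the full duration elapsed.
func sleepWithCancel(ctx context.Context, quit <-chan struct{}, d time.Duration) bool {
	if d == 0 {
		return false
	}
	t := time.NewTimer(d)
	defer t.Stop()
	select {
	case <-t.C:
		return false
	case <-quit:
	case <-ctx.Done():
	}
	return true
}

func main() {
	quit := make(chan struct{})
	fmt.Println("interrupted:", sleepWithCancel(context.Background(), quit, jitter(3000)))
}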
-package entity - -import ( - "context" - "fmt" - "log/slog" - "sync" - - "golang.org/x/sync/errgroup" - - "github.com/cloudbase/garm/auth" - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" - garmUtil "github.com/cloudbase/garm/util" -) - -func NewController(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider) (*Controller, error) { - consumerID := "entity-controller" - ctx = garmUtil.WithSlogContext( - ctx, - slog.Any("worker", consumerID)) - ctx = auth.GetAdminContext(ctx) - - return &Controller{ - consumerID: consumerID, - ctx: ctx, - store: store, - providers: providers, - Entities: make(map[string]*Worker), - }, nil -} - -type Controller struct { - consumerID string - ctx context.Context - - consumer dbCommon.Consumer - store dbCommon.Store - - providers map[string]common.Provider - Entities map[string]*Worker - - running bool - quit chan struct{} - - mux sync.Mutex -} - -func (c *Controller) loadAllRepositories() error { - c.mux.Lock() - defer c.mux.Unlock() - repos, err := c.store.ListRepositories(c.ctx, params.RepositoryFilter{}) - if err != nil { - return fmt.Errorf("fetching repositories: %w", err) - } - - g, _ := errgroup.WithContext(c.ctx) - for _, repo := range repos { - g.Go(func() error { - entity, err := repo.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker - return nil - }) - } - if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) - } - return nil -} - -func (c *Controller) loadAllOrganizations() error { - c.mux.Lock() - defer c.mux.Unlock() - orgs, err := c.store.ListOrganizations(c.ctx, params.OrganizationFilter{}) - if err != nil { - return fmt.Errorf("fetching organizations: %w", err) - } - - g, _ := errgroup.WithContext(c.ctx) - for _, org := range orgs { - g.Go(func() error { - entity, err := org.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker - return nil - }) - } - if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) - } - return nil -} - -func (c *Controller) loadAllEnterprises() error { - c.mux.Lock() - defer c.mux.Unlock() - enterprises, err := c.store.ListEnterprises(c.ctx, params.EnterpriseFilter{}) - if err != nil { - return fmt.Errorf("fetching enterprises: %w", err) - } - - g, _ := errgroup.WithContext(c.ctx) - - for _, enterprise := range enterprises { - g.Go(func() error { - entity, err := enterprise.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker - return nil - }) - } - if err := 
c.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) - } - return nil -} - -func (c *Controller) Start() error { - c.mux.Lock() - if c.running { - c.mux.Unlock() - return nil - } - c.mux.Unlock() - - g, _ := errgroup.WithContext(c.ctx) - g.Go(func() error { - if err := c.loadAllEnterprises(); err != nil { - return fmt.Errorf("loading enterprises: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := c.loadAllOrganizations(); err != nil { - return fmt.Errorf("loading organizations: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := c.loadAllRepositories(); err != nil { - return fmt.Errorf("loading repositories: %w", err) - } - return nil - }) - - if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) - } - - consumer, err := watcher.RegisterConsumer( - c.ctx, c.consumerID, - composeControllerWatcherFilters(), - ) - if err != nil { - return fmt.Errorf("failed to create consumer for entity controller: %w", err) - } - - c.mux.Lock() - c.consumer = consumer - c.running = true - c.quit = make(chan struct{}) - c.mux.Unlock() - - go c.loop() - - return nil -} - -func (c *Controller) Stop() error { - slog.DebugContext(c.ctx, "stopping entity controller", "entity", c.consumerID) - c.mux.Lock() - defer c.mux.Unlock() - if !c.running { - return nil - } - slog.DebugContext(c.ctx, "stopping entity controller") - - for entityID, worker := range c.Entities { - if err := worker.Stop(); err != nil { - slog.ErrorContext(c.ctx, "stopping worker for entity", "entity_id", entityID, "error", err) - } - } - - c.running = false - close(c.quit) - c.consumer.Close() - slog.DebugContext(c.ctx, "stopped entity controller", "entity", c.consumerID) - return nil -} - -func (c *Controller) loop() { - defer c.Stop() - for { - select { - case payload := <-c.consumer.Watch(): - slog.InfoContext(c.ctx, "received payload") - go c.handleWatcherEvent(payload) - case <-c.ctx.Done(): - return - case <-c.quit: - return - } - } -} diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go deleted file mode 100644 index d907d25a..00000000 --- a/workers/entity/controller_watcher.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
-package entity - -import ( - "fmt" - "log/slog" - - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" -) - -func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { - var entityGetter params.EntityGetter - switch event.EntityType { - case dbCommon.RepositoryEntityType: - slog.DebugContext(c.ctx, "got repository payload event") - repo, ok := event.Payload.(params.Repository) - if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - return - } - entityGetter = repo - case dbCommon.OrganizationEntityType: - slog.DebugContext(c.ctx, "got organization payload event") - org, ok := event.Payload.(params.Organization) - if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - return - } - entityGetter = org - case dbCommon.EnterpriseEntityType: - slog.DebugContext(c.ctx, "got enterprise payload event") - ent, ok := event.Payload.(params.Enterprise) - if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - return - } - entityGetter = ent - default: - slog.ErrorContext(c.ctx, "invalid entity type", "entity_type", event.EntityType) - return - } - - entity, err := entityGetter.GetEntity() - if err != nil { - slog.ErrorContext(c.ctx, "getting entity from event payload", "entity_type", event.EntityType, "payload", event.Payload, "error", err) - return - } - - switch event.Operation { - case dbCommon.CreateOperation: - slog.DebugContext(c.ctx, "got create operation") - c.handleWatcherCreateOperation(entity) - case dbCommon.DeleteOperation: - slog.DebugContext(c.ctx, "got delete operation") - c.handleWatcherDeleteOperation(entity) - case dbCommon.UpdateOperation: - slog.DebugContext(c.ctx, "got update operation") - c.handleWatcherUpdateOperation(entity) - default: - slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) - return - } -} - -func (c *Controller) handleWatcherUpdateOperation(entity params.ForgeEntity) { - c.mux.Lock() - defer c.mux.Unlock() - - worker, ok := c.Entities[entity.ID] - if !ok { - slog.InfoContext(c.ctx, "entity not found in worker list", "entity_id", entity.ID) - return - } - - if worker.IsRunning() { - // The worker is running. It watches for updates to its own entity. We only care about updates - // in the controller if, for some reason, the worker is not running.
- slog.DebugContext(c.ctx, "worker is already running, skipping update", "entity_id", entity.ID) - return - } - - slog.InfoContext(c.ctx, "updating entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) - worker.Entity = entity - if err := worker.Start(); err != nil { - slog.ErrorContext(c.ctx, "starting worker after update", "entity_id", entity.ID, "error", err) - worker.addStatusEvent(fmt.Sprintf("failed to start worker for %s (%s) after update: %s", entity.ID, entity.ForgeURL(), err.Error()), params.EventError) - return - } - slog.InfoContext(c.ctx, "entity worker updated and successfully started", "entity_id", entity.ID, "entity_type", entity.EntityType) - worker.addStatusEvent(fmt.Sprintf("worker updated and successfully started for entity: %s (%s)", entity.ID, entity.ForgeURL()), params.EventInfo) -} - -func (c *Controller) handleWatcherCreateOperation(entity params.ForgeEntity) { - c.mux.Lock() - defer c.mux.Unlock() - - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - slog.ErrorContext(c.ctx, "creating worker from repository", "entity_type", entity.EntityType, "error", err) - return - } - - slog.InfoContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) - if err := worker.Start(); err != nil { - slog.ErrorContext(c.ctx, "starting worker", "entity_id", entity.ID, "error", err) - return - } - - c.Entities[entity.ID] = worker -} - -func (c *Controller) handleWatcherDeleteOperation(entity params.ForgeEntity) { - c.mux.Lock() - defer c.mux.Unlock() - - worker, ok := c.Entities[entity.ID] - if !ok { - slog.InfoContext(c.ctx, "entity not found in worker list", "entity_id", entity.ID) - return - } - slog.InfoContext(c.ctx, "stopping entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) - if err := worker.Stop(); err != nil { - slog.ErrorContext(c.ctx, "stopping worker", "entity_id", entity.ID, "error", err) - return - } - delete(c.Entities, entity.ID) -} diff --git a/workers/entity/util.go b/workers/entity/util.go deleted file mode 100644 index 23dbc488..00000000 --- a/workers/entity/util.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. -package entity - -import ( - "strings" - - "golang.org/x/sync/errgroup" - - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/params" -) - -const ( - // These are duplicated until we decide if we move the pool manager to the new - // worker flow. 
- poolIDLabelprefix = "runner-pool-id:" -) - -func composeControllerWatcherFilters() dbCommon.PayloadFilterFunc { - return watcher.WithAll( - watcher.WithAny( - watcher.WithEntityTypeFilter(dbCommon.RepositoryEntityType), - watcher.WithEntityTypeFilter(dbCommon.OrganizationEntityType), - watcher.WithEntityTypeFilter(dbCommon.EnterpriseEntityType), - ), - watcher.WithAny( - watcher.WithOperationTypeFilter(dbCommon.CreateOperation), - watcher.WithOperationTypeFilter(dbCommon.DeleteOperation), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), - ) -} - -func composeWorkerWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { - return watcher.WithAny( - watcher.WithAll( - watcher.WithEntityFilter(entity), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), - // Watch for credentials updates. - watcher.WithAll( - watcher.WithForgeCredentialsFilter(entity.Credentials), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), - ) -} - -func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { - if g == nil { - return nil - } - - done := make(chan error, 1) - go func() { - waitErr := g.Wait() - done <- waitErr - }() - - select { - case err := <-done: - return err - case <-c.ctx.Done(): - return c.ctx.Err() - case <-c.quit: - return nil - } -} - -func poolIDFromLabels(runner params.RunnerReference) string { - for _, lbl := range runner.Labels { - if strings.HasPrefix(lbl.Name, poolIDLabelprefix) { - return lbl.Name[len(poolIDLabelprefix):] - } - } - return "" -} diff --git a/workers/entity/worker.go b/workers/entity/worker.go deleted file mode 100644 index d16c15f5..00000000 --- a/workers/entity/worker.go +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
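(poolIDFromLabels above is the sole consumer of that label prefix. A runnable restatement, simplified to plain strings instead of params.RunnerReference labels; the pool ID in the example is made up:)

package main

import (
	"fmt"
	"strings"
)

const poolIDLabelPrefix = "runner-pool-id:" // same prefix as poolIDLabelprefix above

// poolIDFromLabels scans a runner's labels and strips the prefix from the
// first match, returning "" when the runner carries no pool label.
func poolIDFromLabels(labels []string) string {
	for _, lbl := range labels {
		if strings.HasPrefix(lbl, poolIDLabelPrefix) {
			return lbl[len(poolIDLabelPrefix):]
		}
	}
	return ""
}

func main() {
	labels := []string{"self-hosted", "runner-pool-id:8f2b0a1c"} // hypothetical pool ID
	fmt.Println(poolIDFromLabels(labels))                        // 8f2b0a1c
}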
-package entity
-
-import (
-	"context"
-	"fmt"
-	"log/slog"
-	"sync"
-	"time"
-
-	"golang.org/x/sync/errgroup"
-
-	"github.com/cloudbase/garm/cache"
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/database/watcher"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/runner/common"
-	garmUtil "github.com/cloudbase/garm/util"
-	"github.com/cloudbase/garm/util/github"
-	"github.com/cloudbase/garm/util/github/scalesets"
-	"github.com/cloudbase/garm/workers/scaleset"
-)
-
-func NewWorker(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Worker, error) {
-	consumerID := fmt.Sprintf("entity-worker-%s", entity.ID)
-
-	ctx = garmUtil.WithSlogContext(
-		ctx,
-		slog.Any("worker", consumerID))
-
-	return &Worker{
-		ctx:        ctx,
-		consumerID: consumerID,
-		store:      store,
-		Entity:     entity,
-		providers:  providers,
-	}, nil
-}
-
-type Worker struct {
-	ctx        context.Context
-	consumerID string
-
-	consumer dbCommon.Consumer
-	store    dbCommon.Store
-	ghCli    common.GithubClient
-
-	Entity             params.ForgeEntity
-	providers          map[string]common.Provider
-	scaleSetController *scaleset.Controller
-
-	mux     sync.Mutex
-	running bool
-	quit    chan struct{}
-}
-
-func (w *Worker) Stop() error {
-	slog.DebugContext(w.ctx, "stopping entity worker", "entity", w.consumerID)
-	w.mux.Lock()
-	defer w.mux.Unlock()
-
-	if !w.running {
-		return nil
-	}
-	slog.DebugContext(w.ctx, "stopping entity worker")
-
-	if err := w.scaleSetController.Stop(); err != nil {
-		return fmt.Errorf("stopping scale set controller: %w", err)
-	}
-
-	w.running = false
-	close(w.quit)
-	w.consumer.Close()
-	slog.DebugContext(w.ctx, "entity worker stopped", "entity", w.consumerID)
-	return nil
-}
-
-func (w *Worker) Start() (err error) {
-	slog.DebugContext(w.ctx, "starting entity worker", "entity", w.consumerID)
-	w.mux.Lock()
-	defer w.mux.Unlock()
-
-	epType, err := w.Entity.GetForgeType()
-	if err != nil {
-		return fmt.Errorf("failed to get endpoint type: %w", err)
-	}
-	if epType != params.GithubEndpointType {
-		return nil
-	}
-
-	ghCli, err := github.Client(w.ctx, w.Entity)
-	if err != nil {
-		return fmt.Errorf("creating github client: %w", err)
-	}
-	w.ghCli = ghCli
-	cache.SetGithubClient(w.Entity.ID, ghCli)
-
-	scaleSetController, err := scaleset.NewController(w.ctx, w.store, w.Entity, w.providers)
-	if err != nil {
-		return fmt.Errorf("creating scale set controller: %w", err)
-	}
-
-	if err := scaleSetController.Start(); err != nil {
-		return fmt.Errorf("starting scale set controller: %w", err)
-	}
-	w.scaleSetController = scaleSetController
-
-	defer func() {
-		if err != nil {
-			w.scaleSetController.Stop()
-		}
-	}()
-
-	consumer, err := watcher.RegisterConsumer(
-		w.ctx, w.consumerID,
-		composeWorkerWatcherFilters(w.Entity),
-	)
-	if err != nil {
-		return fmt.Errorf("registering consumer: %w", err)
-	}
-	w.consumer = consumer
-
-	w.running = true
-	w.quit = make(chan struct{})
-
-	go w.loop()
-	go w.consolidateRunnerLoop()
-	return nil
-}
-
-func (w *Worker) IsRunning() bool {
-	w.mux.Lock()
-	defer w.mux.Unlock()
-	return w.running
-}
-
-// consolidateRunnerState will list all runners on GitHub for this entity, sort by
-// pool or scale set and pass those runners to the appropriate controller (pools or scale sets).
-// The controller will then pass along to their respective workers the list of runners
-// they should be responsible for. The workers will then cross check the current state
-// from github with their local state and reconcile any differences. This cleans up
-// any runners that have been removed out of band in either the provider or github.
-func (w *Worker) consolidateRunnerState() error {
-	scaleSetCli, err := scalesets.NewClient(w.ghCli)
-	if err != nil {
-		return fmt.Errorf("creating scaleset client: %w", err)
-	}
-	// Client is scoped to the current entity. Only runners in a repo/org/enterprise
-	// will be listed.
-	runners, err := scaleSetCli.ListAllRunners(w.ctx)
-	if err != nil {
-		return fmt.Errorf("listing runners: %w", err)
-	}
-
-	byPoolID := make(map[string][]params.RunnerReference)
-	byScaleSetID := make(map[int][]params.RunnerReference)
-	for _, runner := range runners.RunnerReferences {
-		if runner.RunnerScaleSetID != 0 {
-			byScaleSetID[runner.RunnerScaleSetID] = append(byScaleSetID[runner.RunnerScaleSetID], runner)
-		} else {
-			poolID := poolIDFromLabels(runner)
-			if poolID == "" {
-				continue
-			}
-			byPoolID[poolID] = append(byPoolID[poolID], runner)
-		}
-	}
-
-	g, ctx := errgroup.WithContext(w.ctx)
-	g.Go(func() error {
-		slog.DebugContext(ctx, "consolidating scale set runners", "entity", w.Entity.String(), "runners", runners)
-		if err := w.scaleSetController.ConsolidateRunnerState(byScaleSetID); err != nil {
-			return fmt.Errorf("consolidating runners for scale set: %w", err)
-		}
-		return nil
-	})
-
-	if err := w.waitForErrorGroupOrContextCancelled(g); err != nil {
-		return fmt.Errorf("waiting for error group: %w", err)
-	}
-	return nil
-}
-
-func (w *Worker) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error {
-	if g == nil {
-		return nil
-	}
-
-	done := make(chan error, 1)
-	go func() {
-		waitErr := g.Wait()
-		done <- waitErr
-	}()
-
-	select {
-	case err := <-done:
-		return err
-	case <-w.ctx.Done():
-		return w.ctx.Err()
-	case <-w.quit:
-		return nil
-	}
-}
-
-func (w *Worker) consolidateRunnerLoop() {
-	ticker := time.NewTicker(common.PoolReapTimeoutInterval)
-	defer ticker.Stop()
-
-	for {
-		select {
-		case _, ok := <-ticker.C:
-			if !ok {
-				slog.InfoContext(w.ctx, "consolidate ticker closed")
-				return
-			}
-			if err := w.consolidateRunnerState(); err != nil {
-				w.addStatusEvent(fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), params.EventError)
-				slog.With(slog.Any("error", err)).Error("failed to consolidate runner state")
-			}
-		case <-w.ctx.Done():
-			return
-		case <-w.quit:
-			return
-		}
-	}
-}
-
-func (w *Worker) loop() {
-	defer w.Stop()
-	for {
-		select {
-		case payload := <-w.consumer.Watch():
-			slog.InfoContext(w.ctx, "received payload")
-			w.handleWorkerWatcherEvent(payload)
-		case <-w.ctx.Done():
-			return
-		case <-w.quit:
-			return
-		}
-	}
-}
-
-func (w *Worker) addStatusEvent(msg string, level params.EventLevel) {
-	if err := w.store.AddEntityEvent(w.ctx, w.Entity, params.StatusEvent, level, msg, 30); err != nil {
-		slog.With(slog.Any("error", err)).Error("failed to add entity event")
-	}
-}
diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go
deleted file mode 100644
index ce8fd244..00000000
--- a/workers/entity/worker_watcher.go
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package entity
-
-import (
-	"log/slog"
-
-	"github.com/cloudbase/garm/cache"
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/util/github"
-)
-
-func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) {
-	// This worker may be for a repo, org or enterprise. React only to the entity type
-	// that this worker is for.
-	entityType := dbCommon.DatabaseEntityType(w.Entity.EntityType)
-	switch event.EntityType {
-	case entityType:
-		w.handleEntityEventPayload(event)
-	case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType:
-		slog.DebugContext(w.ctx, "got github credentials payload event")
-		w.handleEntityCredentialsEventPayload(event)
-	default:
-		slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType)
-	}
-}
-
-func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) {
-	entityGetter, ok := event.Payload.(params.EntityGetter)
-	if !ok {
-		slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload)
-		return
-	}
-	entity, err := entityGetter.GetEntity()
-	if err != nil {
-		slog.ErrorContext(w.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err)
-		return
-	}
-
-	switch event.Operation {
-	case dbCommon.UpdateOperation:
-		slog.DebugContext(w.ctx, "got update operation")
-		w.mux.Lock()
-		defer w.mux.Unlock()
-
-		credentials := entity.Credentials
-		if w.Entity.Credentials.GetID() != credentials.GetID() {
-			// credentials were swapped on the entity. We need to recompose the watcher
-			// filters.
-			w.consumer.SetFilters(composeWorkerWatcherFilters(entity))
-			ghCli, err := github.Client(w.ctx, entity)
-			if err != nil {
-				slog.ErrorContext(w.ctx, "creating github client", "entity_id", entity.ID, "error", err)
-				return
-			}
-			w.ghCli = ghCli
-			cache.SetGithubClient(entity.ID, ghCli)
-		}
-		w.Entity = entity
-	default:
-		slog.ErrorContext(w.ctx, "invalid operation type", "operation_type", event.Operation)
-	}
-}
-
-func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayload) {
-	var creds params.ForgeCredentials
-	var ok bool
-	switch event.EntityType {
-	case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType:
-		creds, ok = event.Payload.(params.ForgeCredentials)
-	default:
-		slog.ErrorContext(w.ctx, "invalid entity type", "entity_type", event.EntityType)
-		return
-	}
-	if !ok {
-		slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload)
-		return
-	}
-
-	switch event.Operation {
-	case dbCommon.UpdateOperation:
-		slog.DebugContext(w.ctx, "got update operation")
-		w.mux.Lock()
-		defer w.mux.Unlock()
-		if w.Entity.Credentials.GetID() != creds.GetID() {
-			// The channel is buffered. We may get an old update. If credentials get updated
-			// immediately after they are swapped on the entity, we may still get an update
-			// pushed to the channel before the filters are swapped. We can ignore the update.
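The ID comparison guarding this branch is the whole defense against the staleness the comment describes. A minimal, self-contained sketch of that failure mode, using hypothetical stand-in types rather than GARM's (only the buffered-channel behavior is taken from the code above):

package main

import "fmt"

type creds struct{ ID, Token string }

type worker struct {
	current creds
	updates chan creds // buffered, like the watcher channel
}

// apply ignores events for credentials the worker no longer uses.
func (w *worker) apply(u creds) {
	if u.ID != w.current.ID {
		fmt.Printf("ignoring stale update for %q\n", u.ID)
		return
	}
	w.current = u
	fmt.Printf("applied update for %q\n", u.ID)
}

func main() {
	w := &worker{current: creds{ID: "creds-B"}, updates: make(chan creds, 2)}
	// An update for the old credentials was queued before the swap to
	// creds-B happened, and is still sitting in the buffer.
	w.updates <- creds{ID: "creds-A", Token: "old"}
	w.updates <- creds{ID: "creds-B", Token: "rotated"}
	close(w.updates)
	for u := range w.updates {
		w.apply(u)
	}
}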
-			return
-		}
-		w.Entity.Credentials = creds
-		ghCli, err := github.Client(w.ctx, w.Entity)
-		if err != nil {
-			slog.ErrorContext(w.ctx, "creating github client", "entity_id", w.Entity.ID, "error", err)
-			return
-		}
-		w.ghCli = ghCli
-		cache.SetGithubClient(w.Entity.ID, ghCli)
-	default:
-		slog.ErrorContext(w.ctx, "invalid operation type", "operation_type", event.Operation)
-	}
-}
diff --git a/workers/provider/errors.go b/workers/provider/errors.go
deleted file mode 100644
index 7c9247dc..00000000
--- a/workers/provider/errors.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package provider
-
-import "fmt"
-
-var ErrInstanceDeleted = fmt.Errorf("instance deleted")
diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go
deleted file mode 100644
index 84e5bcca..00000000
--- a/workers/provider/instance_manager.go
+++ /dev/null
@@ -1,438 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package provider
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"log/slog"
-	"sync"
-	"time"
-
-	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	"github.com/cloudbase/garm/cache"
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/runner/common"
-	garmUtil "github.com/cloudbase/garm/util"
-)
-
-func newInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) {
-	ctx = garmUtil.WithSlogContext(ctx, slog.Any("worker", fmt.Sprintf("instance-worker-%s", instance.Name)))
-
-	githubEntity, err := scaleSet.GetEntity()
-	if err != nil {
-		return nil, fmt.Errorf("getting github entity: %w", err)
-	}
-	return &instanceManager{
-		ctx:            ctx,
-		instance:       instance,
-		provider:       provider,
-		deleteBackoff:  time.Second * 0,
-		scaleSet:       scaleSet,
-		helper:         helper,
-		scaleSetEntity: githubEntity,
-	}, nil
-}
-
-// instanceManager handles the lifecycle of a single instance.
-// When an instance is created, a new instance manager is created
-// for it. When the instance is placed in pending_create, the manager
-// will attempt to create a new compute resource in the designated
-// provider. Finally, when an instance is marked as pending_delete, it is removed
-// from the provider and on success the instance is marked as deleted. Failure to
-// delete will place the instance back in pending delete. The removal process is
-// retried after a backoff period. Instances placed in force_pending_delete will
-// ignore provider errors and exit.
-type instanceManager struct {
-	ctx context.Context
-
-	instance params.Instance
-	provider common.Provider
-	helper   providerHelper
-
-	scaleSet       params.ScaleSet
-	scaleSetEntity params.ForgeEntity
-
-	deleteBackoff time.Duration
-
-	updates chan dbCommon.ChangePayload
-	mux     sync.Mutex
-	running bool
-	quit    chan struct{}
-}
-
-func (i *instanceManager) Start() error {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	slog.DebugContext(i.ctx, "starting instance manager", "instance", i.instance.Name)
-	if i.running {
-		return nil
-	}
-
-	i.running = true
-	i.quit = make(chan struct{})
-	i.updates = make(chan dbCommon.ChangePayload)
-
-	go i.loop()
-	go i.updatesLoop()
-	return nil
-}
-
-func (i *instanceManager) Stop() error {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	if !i.running {
-		return nil
-	}
-
-	i.running = false
-	close(i.quit)
-	close(i.updates)
-	return nil
-}
-
-func (i *instanceManager) sleepForBackOffOrCanceled() bool {
-	timer := time.NewTimer(i.deleteBackoff)
-	defer timer.Stop()
-
-	slog.DebugContext(i.ctx, "sleeping for backoff", "duration", i.deleteBackoff, "instance", i.instance.Name)
-	select {
-	case <-timer.C:
-		return false
-	case <-i.quit:
-		return true
-	case <-i.ctx.Done():
-		return true
-	}
-}
-
-func (i *instanceManager) incrementBackOff() {
-	if i.deleteBackoff == 0 {
-		i.deleteBackoff = time.Second * 1
-	} else {
-		i.deleteBackoff *= 2
-	}
-	if i.deleteBackoff > time.Minute*5 {
-		i.deleteBackoff = time.Minute * 5
-	}
-}
-
-func (i *instanceManager) getEntity() (params.ForgeEntity, error) {
-	entity, err := i.scaleSet.GetEntity()
-	if err != nil {
-		return params.ForgeEntity{}, fmt.Errorf("getting entity: %w", err)
-	}
-	ghEntity, err := i.helper.GetGithubEntity(entity)
-	if err != nil {
-		return params.ForgeEntity{}, fmt.Errorf("getting entity: %w", err)
-	}
-	return ghEntity, nil
-}
-
-func (i *instanceManager) pseudoPoolID() string {
-	// This is temporary. We need to extend providers to know about scale sets.
-	return fmt.Sprintf("%s-%s", i.scaleSet.Name, i.scaleSetEntity.ID)
-}
-
-func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instance) error {
-	entity, err := i.getEntity()
-	if err != nil {
-		return fmt.Errorf("getting entity: %w", err)
-	}
-
-	token, err := i.helper.InstanceTokenGetter().NewInstanceJWTToken(
-		instance, entity, entity.EntityType, i.scaleSet.RunnerBootstrapTimeout)
-	if err != nil {
-		return fmt.Errorf("creating instance token: %w", err)
-	}
-	tools, err := cache.GetGithubToolsCache(entity.ID)
-	if err != nil {
-		return fmt.Errorf("tools not found in cache for entity %s: %w", entity.String(), err)
-	}
-
-	bootstrapArgs := commonParams.BootstrapInstance{
-		Name:          instance.Name,
-		Tools:         tools,
-		RepoURL:       entity.ForgeURL(),
-		MetadataURL:   instance.MetadataURL,
-		CallbackURL:   instance.CallbackURL,
-		InstanceToken: token,
-		OSArch:        i.scaleSet.OSArch,
-		OSType:        i.scaleSet.OSType,
-		Flavor:        i.scaleSet.Flavor,
-		Image:         i.scaleSet.Image,
-		ExtraSpecs:    i.scaleSet.ExtraSpecs,
-		// This is temporary. We need to extend providers to know about scale sets.
-		PoolID:            i.pseudoPoolID(),
-		CACertBundle:      entity.Credentials.CABundle,
-		GitHubRunnerGroup: i.scaleSet.GitHubRunnerGroup,
-		JitConfigEnabled:  true,
-	}
-
-	var instanceIDToDelete string
-	baseParams, err := i.getProviderBaseParams()
-	if err != nil {
-		return fmt.Errorf("getting provider base params: %w", err)
-	}
-
-	defer func() {
-		if instanceIDToDelete != "" {
-			deleteInstanceParams := common.DeleteInstanceParams{
-				DeleteInstanceV011: common.DeleteInstanceV011Params{
-					ProviderBaseParams: baseParams,
-				},
-			}
-			if err := i.provider.DeleteInstance(i.ctx, instanceIDToDelete, deleteInstanceParams); err != nil {
-				if !errors.Is(err, runnerErrors.ErrNotFound) {
-					slog.With(slog.Any("error", err)).ErrorContext(
-						i.ctx, "failed to cleanup instance",
-						"provider_id", instanceIDToDelete)
-				}
-			}
-		}
-	}()
-
-	createInstanceParams := common.CreateInstanceParams{
-		CreateInstanceV011: common.CreateInstanceV011Params{
-			ProviderBaseParams: baseParams,
-		},
-	}
-
-	providerInstance, err := i.provider.CreateInstance(i.ctx, bootstrapArgs, createInstanceParams)
-	if err != nil {
-		instanceIDToDelete = instance.Name
-		return fmt.Errorf("creating instance in provider: %w", err)
-	}
-
-	if providerInstance.Status == commonParams.InstanceError {
-		instanceIDToDelete = instance.ProviderID
-		if instanceIDToDelete == "" {
-			instanceIDToDelete = instance.Name
-		}
-	}
-
-	updated, err := i.helper.updateArgsFromProviderInstance(instance.Name, providerInstance)
-	if err != nil {
-		return fmt.Errorf("updating instance args: %w", err)
-	}
-	i.instance = updated
-
-	return nil
-}
-
-func (i *instanceManager) getProviderBaseParams() (common.ProviderBaseParams, error) {
-	info, err := i.helper.GetControllerInfo()
-	if err != nil {
-		return common.ProviderBaseParams{}, fmt.Errorf("getting controller info: %w", err)
-	}
-
-	return common.ProviderBaseParams{
-		ControllerInfo: info,
-	}, nil
-}
-
-func (i *instanceManager) handleDeleteInstanceInProvider(instance params.Instance) error {
-	slog.InfoContext(i.ctx, "deleting instance in provider", "runner_name", instance.Name)
-	identifier := instance.ProviderID
-	if identifier == "" {
-		// provider did not return a provider ID?
-		// try with name
-		identifier = instance.Name
-	}
-
-	baseParams, err := i.getProviderBaseParams()
-	if err != nil {
-		return fmt.Errorf("getting provider base params: %w", err)
-	}
-
-	slog.DebugContext(
-		i.ctx, "calling delete instance on provider",
-		"runner_name", instance.Name,
-		"provider_id", identifier)
-
-	deleteInstanceParams := common.DeleteInstanceParams{
-		DeleteInstanceV011: common.DeleteInstanceV011Params{
-			ProviderBaseParams: baseParams,
-		},
-	}
-	if err := i.provider.DeleteInstance(i.ctx, identifier, deleteInstanceParams); err != nil {
-		return fmt.Errorf("deleting instance in provider: %w", err)
-	}
-	return nil
-}
-
-func (i *instanceManager) consolidateState() error {
-	i.mux.Lock()
-	defer i.mux.Unlock()
-
-	if !i.running {
-		return nil
-	}
-
-	switch i.instance.Status {
-	case commonParams.InstancePendingCreate:
-		// kick off the creation process
-		if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceCreating, nil); err != nil {
-			return fmt.Errorf("setting instance status to creating: %w", err)
-		}
-		if err := i.handleCreateInstanceInProvider(i.instance); err != nil {
-			slog.ErrorContext(i.ctx, "creating instance in provider", "error", err)
-			if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceError, []byte(err.Error())); err != nil {
-				return fmt.Errorf("setting instance status to error: %w", err)
-			}
-		}
-	case commonParams.InstanceRunning:
-		// Nothing to do. The provider finished creating the instance.
-	case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete:
-		// Remove or force remove the runner. When force remove is specified, we ignore
-		// IaaS errors.
-		if i.instance.Status == commonParams.InstancePendingDelete {
-			// invoke backoff sleep. We only do this for non forced removals,
-			// as force delete will always return, regardless of whether or not
-			// the remove operation succeeded in the provider. A user may decide
-			// to force delete a runner if GARM fails to remove it normally.
-			if canceled := i.sleepForBackOffOrCanceled(); canceled {
-				// the worker is shutting down. Return here.
-				return nil
-			}
-		}
-
-		prevStatus := i.instance.Status
-		if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleting, nil); err != nil {
-			if errors.Is(err, runnerErrors.ErrNotFound) {
-				return nil
-			}
-			return fmt.Errorf("setting instance status to deleting: %w", err)
-		}
-
-		if err := i.handleDeleteInstanceInProvider(i.instance); err != nil {
-			slog.ErrorContext(i.ctx, "deleting instance in provider", "error", err, "forced", i.instance.Status == commonParams.InstancePendingForceDelete)
-			if prevStatus == commonParams.InstancePendingDelete {
-				i.incrementBackOff()
-				if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstancePendingDelete, []byte(err.Error())); err != nil {
-					return fmt.Errorf("setting instance status to error: %w", err)
-				}
-
-				return fmt.Errorf("error removing instance. Will retry: %w", err)
-			}
-		}
-		if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleted, nil); err != nil {
-			if !errors.Is(err, runnerErrors.ErrNotFound) {
-				return fmt.Errorf("setting instance status to deleted: %w", err)
-			}
-		}
-		return ErrInstanceDeleted
-	case commonParams.InstanceError:
-		// Instance is in error state. We wait for next status or potentially retry
-		// spawning the instance with a backoff timer.
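The backoff this comment refers to is implemented by incrementBackOff and sleepForBackOffOrCanceled above: the delay doubles per failed attempt, is capped at five minutes, and the sleep itself must stay cancellable so a stopping worker never blocks on a retry timer. A minimal sketch of that pattern in isolation (names are illustrative, not GARM's):

package main

import (
	"context"
	"fmt"
	"time"
)

const maxBackoff = 5 * time.Minute

// nextBackoff doubles the delay on every failure and caps it.
func nextBackoff(d time.Duration) time.Duration {
	if d == 0 {
		return time.Second
	}
	if d *= 2; d > maxBackoff {
		return maxBackoff
	}
	return d
}

// sleep waits for d but returns early (true) if ctx is cancelled.
func sleep(ctx context.Context, d time.Duration) (cancelled bool) {
	t := time.NewTimer(d)
	defer t.Stop()
	select {
	case <-t.C:
		return false
	case <-ctx.Done():
		return true
	}
}

func main() {
	var d time.Duration
	for i := 0; i < 5; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 1s 2s 4s 8s 16s
	}
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond)
	defer cancel()
	fmt.Println(sleep(ctx, time.Hour)) // true: cancelled long before the timer fires
}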
-		if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstancePendingDelete, nil); err != nil {
-			return fmt.Errorf("setting instance status to error: %w", err)
-		}
-	case commonParams.InstanceDeleted:
-		return ErrInstanceDeleted
-	}
-	return nil
-}
-
-func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error {
-	// We need a better way to handle instance state. Database updates may fail, and we
-	// end up with an inconsistent state between what we know about the instance and what
-	// is reflected in the database.
-	if !i.running {
-		return nil
-	}
-
-	instance, ok := update.Payload.(params.Instance)
-	if !ok {
-		return runnerErrors.NewBadRequestError("invalid payload type")
-	}
-
-	i.instance = instance
-	return nil
-}
-
-func (i *instanceManager) Update(instance dbCommon.ChangePayload) error {
-	if !i.running {
-		return runnerErrors.NewBadRequestError("instance manager is not running")
-	}
-
-	timer := time.NewTimer(10 * time.Second)
-	defer timer.Stop()
-
-	slog.DebugContext(i.ctx, "sending update to instance manager")
-	select {
-	case i.updates <- instance:
-	case <-i.quit:
-		return nil
-	case <-i.ctx.Done():
-		return nil
-	case <-timer.C:
-		return fmt.Errorf("timeout while sending update to instance manager")
-	}
-	return nil
-}
-
-func (i *instanceManager) updatesLoop() {
-	defer i.Stop()
-	ticker := time.NewTicker(5 * time.Second)
-	defer ticker.Stop()
-	for {
-		select {
-		case <-i.quit:
-			return
-		case <-i.ctx.Done():
-			return
-		case update, ok := <-i.updates:
-			if !ok {
-				slog.InfoContext(i.ctx, "updates channel closed")
-				return
-			}
-			slog.DebugContext(i.ctx, "received update")
-			if err := i.handleUpdate(update); err != nil {
-				if errors.Is(err, ErrInstanceDeleted) {
-					// instance had been deleted, we can exit the loop.
-					return
-				}
-				slog.ErrorContext(i.ctx, "handling update", "error", err)
-			}
-		}
-	}
-}
-
-func (i *instanceManager) loop() {
-	defer i.Stop()
-	ticker := time.NewTicker(5 * time.Second)
-	defer ticker.Stop()
-	for {
-		select {
-		case <-i.quit:
-			return
-		case <-i.ctx.Done():
-			return
-		case <-ticker.C:
-			if err := i.consolidateState(); err != nil {
-				if errors.Is(err, ErrInstanceDeleted) {
-					// instance had been deleted, we can exit the loop.
-					return
-				}
-				slog.ErrorContext(i.ctx, "consolidating state", "error", err)
-			}
-		}
-	}
-}
diff --git a/workers/provider/provider.go b/workers/provider/provider.go
deleted file mode 100644
index 78e50955..00000000
--- a/workers/provider/provider.go
+++ /dev/null
@@ -1,306 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package provider
-
-import (
-	"context"
-	"fmt"
-	"log/slog"
-	"sync"
-
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	"github.com/cloudbase/garm/auth"
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/database/watcher"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/runner/common"
-	garmUtil "github.com/cloudbase/garm/util"
-)
-
-func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider, tokenGetter auth.InstanceTokenGetter) (*Provider, error) {
-	consumerID := "provider-worker"
-
-	ctx = garmUtil.WithSlogContext(
-		ctx,
-		slog.Any("worker", consumerID))
-
-	return &Provider{
-		ctx:         ctx,
-		store:       store,
-		consumerID:  consumerID,
-		providers:   providers,
-		tokenGetter: tokenGetter,
-		scaleSets:   make(map[uint]params.ScaleSet),
-		runners:     make(map[string]*instanceManager),
-	}, nil
-}
-
-type Provider struct {
-	ctx        context.Context
-	consumerID string
-
-	consumer dbCommon.Consumer
-	// nolint:golangci-lint,godox
-	// TODO: not all workers should have access to the store.
-	// We need to implement a way to RPC from workers to controllers
-	// and abstract that into something we can use to eventually
-	// scale out.
-	store       dbCommon.Store
-	tokenGetter auth.InstanceTokenGetter
-
-	providers map[string]common.Provider
-	// A cache of all scale sets kept updated by the watcher.
-	// This helps us avoid a bunch of queries to the database.
-	scaleSets map[uint]params.ScaleSet
-	runners   map[string]*instanceManager
-
-	mux     sync.Mutex
-	running bool
-	quit    chan struct{}
-}
-
-func (p *Provider) loadAllScaleSets() error {
-	scaleSets, err := p.store.ListAllScaleSets(p.ctx)
-	if err != nil {
-		return fmt.Errorf("fetching scale sets: %w", err)
-	}
-
-	for _, scaleSet := range scaleSets {
-		p.scaleSets[scaleSet.ID] = scaleSet
-	}
-
-	return nil
-}
-
-// loadAllRunners loads all runners from the database. At this stage we only
-// care about runners created by scale sets, but in the future, we will migrate
-// the pool manager to the same model.
-func (p *Provider) loadAllRunners() error {
-	runners, err := p.store.ListAllInstances(p.ctx)
-	if err != nil {
-		return fmt.Errorf("fetching runners: %w", err)
-	}
-
-	for _, runner := range runners {
-		// Skip non scale set instances for now. This condition needs to be
-		// removed once we replace the current pool manager.
-		if runner.ScaleSetID == 0 {
-			continue
-		}
-		// Ignore runners in "creating" state. If we're just starting up and
-		// we find a runner in "creating" it was most likely interrupted while
-		// creating. It is unlikely that it is still usable. We allow the scale set
-		// worker to clean it up. It will eventually be marked as pending delete and
-		// this worker will get an update to clean up any resources left behind by
-		// an incomplete creation event.
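The skip logic described in this comment and the cases that follow amount to a small adoption predicate: runners stuck in creating, or already in-flight on deletion, are left for other workers to reconcile. A minimal sketch of that decision, with simplified stand-in statuses rather than the real commonParams constants:

package main

import "fmt"

type status string

const (
	creating      status = "creating"
	running       status = "running"
	pendingDelete status = "pending_delete"
	deleting      status = "deleting"
	deleted       status = "deleted"
)

// shouldAdopt reports whether a runner found in the DB at startup should get
// an instance manager. Interrupted creations and in-flight deletions are left
// for the scale set worker to reconcile, mirroring the skips above.
func shouldAdopt(s status) bool {
	switch s {
	case creating, deleting, deleted:
		return false
	}
	return true
}

func main() {
	for _, s := range []status{creating, running, pendingDelete, deleting, deleted} {
		fmt.Printf("%-15s adopt=%v\n", s, shouldAdopt(s))
	}
}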
-		if runner.Status == commonParams.InstanceCreating {
-			continue
-		}
-		if runner.Status == commonParams.InstanceDeleting || runner.Status == commonParams.InstanceDeleted {
-			continue
-		}
-
-		scaleSet, ok := p.scaleSets[runner.ScaleSetID]
-		if !ok {
-			slog.ErrorContext(p.ctx, "scale set not found", "scale_set_id", runner.ScaleSetID)
-			continue
-		}
-		provider, ok := p.providers[scaleSet.ProviderName]
-		if !ok {
-			slog.ErrorContext(p.ctx, "provider not found", "provider_name", runner.ProviderName)
-			continue
-		}
-		instanceManager, err := newInstanceManager(
-			p.ctx, runner, scaleSet, provider, p)
-		if err != nil {
-			return fmt.Errorf("creating instance manager: %w", err)
-		}
-		if err := instanceManager.Start(); err != nil {
-			return fmt.Errorf("starting instance manager: %w", err)
-		}
-
-		p.runners[runner.Name] = instanceManager
-	}
-
-	return nil
-}
-
-func (p *Provider) Start() error {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	if p.running {
-		return nil
-	}
-
-	if err := p.loadAllScaleSets(); err != nil {
-		return fmt.Errorf("loading all scale sets: %w", err)
-	}
-
-	if err := p.loadAllRunners(); err != nil {
-		return fmt.Errorf("loading all runners: %w", err)
-	}
-
-	consumer, err := watcher.RegisterConsumer(
-		p.ctx, p.consumerID, composeProviderWatcher())
-	if err != nil {
-		return fmt.Errorf("registering consumer: %w", err)
-	}
-	p.consumer = consumer
-
-	p.quit = make(chan struct{})
-	p.running = true
-	go p.loop()
-
-	return nil
-}
-
-func (p *Provider) Stop() error {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	if !p.running {
-		return nil
-	}
-
-	p.consumer.Close()
-	close(p.quit)
-	p.running = false
-	return nil
-}
-
-func (p *Provider) loop() {
-	defer p.Stop()
-	for {
-		select {
-		case payload, ok := <-p.consumer.Watch():
-			if !ok {
-				slog.ErrorContext(p.ctx, "watcher channel closed")
-				return
-			}
-			slog.InfoContext(p.ctx, "received payload")
-			go p.handleWatcherEvent(payload)
-		case <-p.ctx.Done():
-			return
-		case <-p.quit:
-			return
-		}
-	}
-}
-
-func (p *Provider) handleWatcherEvent(payload dbCommon.ChangePayload) {
-	switch payload.EntityType {
-	case dbCommon.ScaleSetEntityType:
-		p.handleScaleSetEvent(payload)
-	case dbCommon.InstanceEntityType:
-		p.handleInstanceEvent(payload)
-	default:
-		slog.ErrorContext(p.ctx, "invalid entity type", "entity_type", payload.EntityType)
-	}
-}
-
-func (p *Provider) handleScaleSetEvent(event dbCommon.ChangePayload) {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	scaleSet, ok := event.Payload.(params.ScaleSet)
-	if !ok {
-		slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload))
-		return
-	}
-
-	switch event.Operation {
-	case dbCommon.CreateOperation, dbCommon.UpdateOperation:
-		slog.DebugContext(p.ctx, "got create/update operation")
-		p.scaleSets[scaleSet.ID] = scaleSet
-	case dbCommon.DeleteOperation:
-		slog.DebugContext(p.ctx, "got delete operation")
-		delete(p.scaleSets, scaleSet.ID)
-	default:
-		slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation)
-		return
-	}
-}
-
-func (p *Provider) handleInstanceAdded(instance params.Instance) error {
-	scaleSet, ok := p.scaleSets[instance.ScaleSetID]
-	if !ok {
-		return fmt.Errorf("scale set not found for instance %s", instance.Name)
-	}
-	instanceManager, err := newInstanceManager(
-		p.ctx, instance, scaleSet, p.providers[instance.ProviderName], p)
-	if err != nil {
-		return fmt.Errorf("creating instance manager: %w", err)
-	}
-	if err := instanceManager.Start(); err != nil {
-		return fmt.Errorf("starting instance manager: %w", err)
-	}
-	p.runners[instance.Name] = instanceManager
-	return nil
-}
-
-func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	instance, ok := event.Payload.(params.Instance)
-	if !ok {
-		slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload))
-		return
-	}
-
-	if instance.ScaleSetID == 0 {
-		slog.DebugContext(p.ctx, "skipping instance event for non scale set instance")
-		return
-	}
-
-	slog.DebugContext(p.ctx, "handling instance event", "instance_name", instance.Name)
-	switch event.Operation {
-	case dbCommon.CreateOperation:
-		slog.DebugContext(p.ctx, "got create operation")
-		if err := p.handleInstanceAdded(instance); err != nil {
-			slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err)
-			return
-		}
-	case dbCommon.UpdateOperation:
-		slog.DebugContext(p.ctx, "got update operation")
-		existingInstance, ok := p.runners[instance.Name]
-		if !ok {
-			slog.DebugContext(p.ctx, "instance not found, creating new instance", "instance_name", instance.Name)
-			if err := p.handleInstanceAdded(instance); err != nil {
-				slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err)
-				return
-			}
-		} else {
-			slog.DebugContext(p.ctx, "updating instance", "instance_name", instance.Name)
-			if err := existingInstance.Update(event); err != nil {
-				slog.ErrorContext(p.ctx, "failed to update instance", "error", err)
-				return
-			}
-		}
-	case dbCommon.DeleteOperation:
-		slog.DebugContext(p.ctx, "got delete operation", "instance_name", instance.Name)
-		existingInstance, ok := p.runners[instance.Name]
-		if ok {
-			if err := existingInstance.Stop(); err != nil {
-				slog.ErrorContext(p.ctx, "failed to stop instance", "error", err)
-				return
-			}
-		}
-		delete(p.runners, instance.Name)
-	default:
-		slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation)
-		return
-	}
-}
diff --git a/workers/provider/provider_helper.go b/workers/provider/provider_helper.go
deleted file mode 100644
index 13694794..00000000
--- a/workers/provider/provider_helper.go
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package provider
-
-import (
-	"fmt"
-
-	"github.com/cloudbase/garm-provider-common/errors"
-	commonParams "github.com/cloudbase/garm-provider-common/params"
-	"github.com/cloudbase/garm/auth"
-	"github.com/cloudbase/garm/params"
-)
-
-type providerHelper interface {
-	SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error
-	InstanceTokenGetter() auth.InstanceTokenGetter
-	updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error)
-	GetControllerInfo() (params.ControllerInfo, error)
-	GetGithubEntity(entity params.ForgeEntity) (params.ForgeEntity, error)
-}
-
-func (p *Provider) updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) {
-	updateParams := params.UpdateInstanceParams{
-		ProviderID:    providerInstance.ProviderID,
-		OSName:        providerInstance.OSName,
-		OSVersion:     providerInstance.OSVersion,
-		Addresses:     providerInstance.Addresses,
-		Status:        providerInstance.Status,
-		ProviderFault: providerInstance.ProviderFault,
-	}
-
-	updated, err := p.store.UpdateInstance(p.ctx, instanceName, updateParams)
-	if err != nil {
-		return params.Instance{}, fmt.Errorf("updating instance %s: %w", instanceName, err)
-	}
-	return updated, nil
-}
-
-func (p *Provider) GetControllerInfo() (params.ControllerInfo, error) {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	info, err := p.store.ControllerInfo()
-	if err != nil {
-		return params.ControllerInfo{}, fmt.Errorf("getting controller info: %w", err)
-	}
-
-	return info, nil
-}
-
-func (p *Provider) SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error {
-	p.mux.Lock()
-	defer p.mux.Unlock()
-
-	if _, ok := p.runners[instanceName]; !ok {
-		return errors.ErrNotFound
-	}
-
-	updateParams := params.UpdateInstanceParams{
-		Status:        status,
-		ProviderFault: providerFault,
-	}
-
-	_, err := p.store.UpdateInstance(p.ctx, instanceName, updateParams)
-	if err != nil {
-		return fmt.Errorf("updating instance %s: %w", instanceName, err)
-	}
-
-	return nil
-}
-
-func (p *Provider) InstanceTokenGetter() auth.InstanceTokenGetter {
-	return p.tokenGetter
-}
-
-func (p *Provider) GetGithubEntity(entity params.ForgeEntity) (params.ForgeEntity, error) {
-	ghEntity, err := p.store.GetForgeEntity(p.ctx, entity.EntityType, entity.ID)
-	if err != nil {
-		return params.ForgeEntity{}, fmt.Errorf("getting github entity: %w", err)
-	}
-
-	return ghEntity, nil
-}
diff --git a/workers/provider/util.go b/workers/provider/util.go
deleted file mode 100644
index cf27d14f..00000000
--- a/workers/provider/util.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
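provider_helper.go above hands the instance manager a narrow providerHelper interface instead of the whole Provider or store. A minimal sketch of why that decoupling helps, with illustrative types only (a fake helper stands in for the store-backed one, which is all a unit test needs):

package main

import "fmt"

// helper is a narrow stand-in for providerHelper: the manager only needs a
// couple of operations, not the entire store API.
type helper interface {
	SetStatus(name, status string) error
}

type manager struct{ h helper }

func (m *manager) markFailed(name string) error {
	return m.h.SetStatus(name, "error")
}

// fakeHelper records calls instead of touching a database.
type fakeHelper struct{ calls []string }

func (f *fakeHelper) SetStatus(name, status string) error {
	f.calls = append(f.calls, name+"="+status)
	return nil
}

func main() {
	f := &fakeHelper{}
	m := &manager{h: f}
	_ = m.markFailed("runner-1")
	fmt.Println(f.calls) // [runner-1=error]
}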
-package provider
-
-import (
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/database/watcher"
-)
-
-func composeProviderWatcher() dbCommon.PayloadFilterFunc {
-	return watcher.WithAny(
-		watcher.WithEntityTypeFilter(dbCommon.InstanceEntityType),
-		watcher.WithEntityTypeFilter(dbCommon.ScaleSetEntityType),
-	)
-}
diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go
deleted file mode 100644
index 32d3d713..00000000
--- a/workers/scaleset/controller.go
+++ /dev/null
@@ -1,228 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package scaleset
-
-import (
-	"context"
-	"fmt"
-	"log/slog"
-	"sync"
-
-	"golang.org/x/sync/errgroup"
-
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/database/watcher"
-	"github.com/cloudbase/garm/params"
-	"github.com/cloudbase/garm/runner/common"
-	garmUtil "github.com/cloudbase/garm/util"
-)
-
-func NewController(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Controller, error) {
-	consumerID := fmt.Sprintf("scaleset-controller-%s", entity.ID)
-
-	ctx = garmUtil.WithSlogContext(
-		ctx,
-		slog.Any("worker", consumerID),
-		slog.Any("entity", entity.String()),
-		slog.Any("endpoint", entity.Credentials.Endpoint),
-	)
-
-	return &Controller{
-		ctx:        ctx,
-		consumerID: consumerID,
-		ScaleSets:  make(map[uint]*scaleSet),
-		Entity:     entity,
-		providers:  providers,
-		store:      store,
-	}, nil
-}
-
-type scaleSet struct {
-	scaleSet params.ScaleSet
-	worker   *Worker
-
-	mux sync.Mutex
-}
-
-func (s *scaleSet) Stop() error {
-	s.mux.Lock()
-	defer s.mux.Unlock()
-
-	if s.worker == nil {
-		return nil
-	}
-
-	return s.worker.Stop()
-}
-
-// Controller is responsible for managing scale sets for one github entity.
-type Controller struct {
-	ctx        context.Context
-	consumerID string
-
-	ScaleSets map[uint]*scaleSet
-
-	Entity params.ForgeEntity
-
-	consumer  dbCommon.Consumer
-	store     dbCommon.Store
-	providers map[string]common.Provider
-
-	mux     sync.Mutex
-	running bool
-	quit    chan struct{}
-}
-
-func (c *Controller) loadAllScaleSets() error {
-	scaleSets, err := c.store.ListEntityScaleSets(c.ctx, c.Entity)
-	if err != nil {
-		return fmt.Errorf("listing scale sets: %w", err)
-	}
-
-	for _, sSet := range scaleSets {
-		slog.DebugContext(c.ctx, "loading scale set", "scale_set", sSet.ID)
-		if err := c.handleScaleSetCreateOperation(sSet); err != nil {
-			slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation")
-			continue
-		}
-	}
-	return nil
-}
-
-func (c *Controller) Start() (err error) {
-	slog.DebugContext(c.ctx, "starting scale set controller", "scale_set", c.consumerID)
-	c.mux.Lock()
-	if c.running {
-		c.mux.Unlock()
-		return nil
-	}
-	c.mux.Unlock()
-
-	forgeType, err := c.Entity.GetForgeType()
-	if err != nil {
-		return fmt.Errorf("getting forge type: %w", err)
-	}
-	if forgeType == params.GithubEndpointType {
-		// scale sets are only available in Github
-		slog.DebugContext(c.ctx, "loading scale sets", "entity", c.Entity.String())
-		if err := c.loadAllScaleSets(); err != nil {
-			return fmt.Errorf("loading all scale sets: %w", err)
-		}
-	}
-
-	consumer, err := watcher.RegisterConsumer(
-		c.ctx, c.consumerID,
-		composeControllerWatcherFilters(c.Entity),
-	)
-	if err != nil {
-		return fmt.Errorf("registering consumer %q: %w", c.consumerID, err)
-	}
-
-	c.mux.Lock()
-	c.consumer = consumer
-	c.running = true
-	c.quit = make(chan struct{})
-	c.mux.Unlock()
-
-	go c.loop()
-	return nil
-}
-
-func (c *Controller) Stop() error {
-	slog.DebugContext(c.ctx, "stopping scale set controller", "scale_set", c.consumerID)
-	c.mux.Lock()
-	defer c.mux.Unlock()
-
-	if !c.running {
-		return nil
-	}
-	slog.DebugContext(c.ctx, "stopping scaleset controller", "entity", c.Entity.String())
-
-	for scaleSetID, scaleSet := range c.ScaleSets {
-		if err := scaleSet.Stop(); err != nil {
-			slog.ErrorContext(c.ctx, "stopping worker for scale set", "scale_set_id", scaleSetID, "error", err)
-			continue
-		}
-		delete(c.ScaleSets, scaleSetID)
-	}
-
-	c.running = false
-	close(c.quit)
-	c.consumer.Close()
-	slog.DebugContext(c.ctx, "stopped scale set controller", "entity", c.Entity.String())
-	return nil
-}
-
-// ConsolidateRunnerState will send a list of existing github runners to each scale set worker.
-// The scale set worker will then need to cross check the existing runners in Github with the state
-// in the database. Any inconsistencies will be reconciled. This cleans up any manually removed
-// runners in either github or the providers.
-func (c *Controller) ConsolidateRunnerState(byScaleSetID map[int][]params.RunnerReference) error {
-	g, ctx := errgroup.WithContext(c.ctx)
-	for _, scaleSet := range c.ScaleSets {
-		runners := byScaleSetID[scaleSet.scaleSet.ScaleSetID]
-		g.Go(func() error {
-			slog.DebugContext(ctx, "consolidating runners for scale set", "scale_set_id", scaleSet.scaleSet.ScaleSetID, "runners", runners)
-			if err := scaleSet.worker.consolidateRunnerState(runners); err != nil {
-				return fmt.Errorf("consolidating runners for scale set %d: %w", scaleSet.scaleSet.ScaleSetID, err)
-			}
-			return nil
-		})
-	}
-	if err := c.waitForErrorGroupOrContextCancelled(g); err != nil {
-		return fmt.Errorf("waiting for error group: %w", err)
-	}
-	return nil
-}
-
-func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error {
-	if g == nil {
-		return nil
-	}
-
-	done := make(chan error, 1)
-	go func() {
-		waitErr := g.Wait()
-		done <- waitErr
-	}()
-
-	select {
-	case err := <-done:
-		return err
-	case <-c.ctx.Done():
-		return c.ctx.Err()
-	case <-c.quit:
-		return nil
-	}
-}
-
-func (c *Controller) loop() {
-	defer c.Stop()
-
-	for {
-		select {
-		case payload, ok := <-c.consumer.Watch():
-			if !ok {
-				slog.InfoContext(c.ctx, "consumer channel closed")
-				return
-			}
-			slog.InfoContext(c.ctx, "received payload")
-			c.handleWatcherEvent(payload)
-		case <-c.ctx.Done():
-			return
-		case <-c.quit:
-			return
-		}
-	}
-}
diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go
deleted file mode 100644
index e3c32ea6..00000000
--- a/workers/scaleset/controller_watcher.go
+++ /dev/null
@@ -1,192 +0,0 @@
-// Copyright 2025 Cloudbase Solutions SRL
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//	http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-package scaleset
-
-import (
-	"fmt"
-	"log/slog"
-
-	dbCommon "github.com/cloudbase/garm/database/common"
-	"github.com/cloudbase/garm/params"
-)
-
-func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) {
-	entityType := dbCommon.DatabaseEntityType(c.Entity.EntityType)
-	switch event.EntityType {
-	case dbCommon.ScaleSetEntityType:
-		slog.DebugContext(c.ctx, "got scale set payload event")
-		c.handleScaleSet(event)
-	case entityType:
-		slog.DebugContext(c.ctx, "got entity payload event")
-		c.handleEntityEvent(event)
-	default:
-		slog.ErrorContext(c.ctx, "invalid entity type", "entity_type", event.EntityType)
-		return
-	}
-}
-
-func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) {
-	scaleSet, ok := event.Payload.(params.ScaleSet)
-	if !ok {
-		slog.ErrorContext(c.ctx, "invalid scale set payload for entity type", "entity_type", event.EntityType, "payload", event)
-		return
-	}
-
-	switch event.Operation {
-	case dbCommon.CreateOperation:
-		slog.DebugContext(c.ctx, "got create operation for scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name)
-		if err := c.handleScaleSetCreateOperation(scaleSet); err != nil {
-			slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation")
-		}
-	case dbCommon.UpdateOperation:
-		slog.DebugContext(c.ctx, "got update operation for scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name)
-		if err := c.handleScaleSetUpdateOperation(scaleSet); err != nil {
-			slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set update operation")
-		}
-	case dbCommon.DeleteOperation:
-		slog.DebugContext(c.ctx, "got delete operation")
-		if err := c.handleScaleSetDeleteOperation(scaleSet); err != nil {
-			slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set delete operation")
-		}
-	default:
-		slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation)
-		return
-	}
-}
-
-func (c *Controller) createScaleSetWorker(scaleSet params.ScaleSet) (*Worker, error) {
-	provider, ok := c.providers[scaleSet.ProviderName]
-	if !ok {
-		// Providers are currently static, set in the config and cannot be updated without a restart.
-		// ScaleSets and pools also do not allow updating the provider. This condition is not recoverable
-		// without a restart, so we don't need to instantiate a worker for this scale set.
-		return nil, fmt.Errorf("provider %s not found for scale set %s", scaleSet.ProviderName, scaleSet.Name)
-	}
-
-	worker, err := NewWorker(c.ctx, c.store, scaleSet, provider)
-	if err != nil {
-		return nil, fmt.Errorf("creating scale set worker: %w", err)
-	}
-	return worker, nil
-}
-
-func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet) error {
-	c.mux.Lock()
-	defer c.mux.Unlock()
-
-	if _, ok := c.ScaleSets[sSet.ID]; ok {
-		slog.DebugContext(c.ctx, "scale set already exists in worker list", "scale_set_id", sSet.ID)
-		return nil
-	}
-
-	worker, err := c.createScaleSetWorker(sSet)
-	if err != nil {
-		return fmt.Errorf("error creating scale set worker: %w", err)
-	}
-	if err := worker.Start(); err != nil {
-		// The Start() function should only return an error if an unrecoverable error occurs.
-		// For transient errors, it should mark the scale set as being in error, but continue
-		// to retry fixing the condition. For example, not being able to retrieve tools due to bad
-		// credentials should not stop the worker. The credentials can be fixed and the worker
-		// can continue to work.
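The comment above draws a line between unrecoverable failures (returned from Start) and transient ones (recorded as status, then retried). A minimal sketch of one way to encode such a policy with a sentinel error; the sentinel and types here are illustrative, not GARM's:

package main

import (
	"errors"
	"fmt"
)

// errFatal marks conditions a retry cannot fix (e.g. an unknown provider).
var errFatal = errors.New("unrecoverable")

type worker struct{ lastErr error }

// start returns an error only for unrecoverable failures; transient ones
// are recorded and left for a later retry loop to resolve.
func (w *worker) start(setup func() error) error {
	if err := setup(); err != nil {
		if errors.Is(err, errFatal) {
			return err
		}
		w.lastErr = err
		fmt.Println("starting degraded:", err)
	}
	return nil
}

func main() {
	w := &worker{}
	fmt.Println(w.start(func() error { return errors.New("fetching tools: bad credentials") })) // nil: degraded but running
	fmt.Println(w.start(func() error { return fmt.Errorf("provider not found: %w", errFatal) })) // error: give up
}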
- return fmt.Errorf("error starting scale set worker: %w", err) - } - c.ScaleSets[sSet.ID] = &scaleSet{ - scaleSet: sSet, - worker: worker, - } - return nil -} - -func (c *Controller) handleScaleSetDeleteOperation(sSet params.ScaleSet) error { - c.mux.Lock() - defer c.mux.Unlock() - - set, ok := c.ScaleSets[sSet.ID] - if !ok { - slog.DebugContext(c.ctx, "scale set not found in worker list", "scale_set_id", sSet.ID) - return nil - } - - slog.DebugContext(c.ctx, "stopping scale set worker", "scale_set_id", sSet.ID) - if err := set.worker.Stop(); err != nil { - return fmt.Errorf("stopping scale set worker: %w", err) - } - delete(c.ScaleSets, sSet.ID) - return nil -} - -func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { - c.mux.Lock() - defer c.mux.Unlock() - - set, ok := c.ScaleSets[sSet.ID] - if !ok { - // Some error may have occurred when the scale set was first created, so we - // attempt to create it after the user updated the scale set, hopefully - // fixing the reason for the failure. - return c.handleScaleSetCreateOperation(sSet) - } - if set.worker != nil && !set.worker.IsRunning() { - worker, err := c.createScaleSetWorker(sSet) - if err != nil { - return fmt.Errorf("creating scale set worker: %w", err) - } - set.worker = worker - defer func() { - if err := worker.Start(); err != nil { - slog.ErrorContext(c.ctx, "failed to start worker", "error", err, "scaleset", sSet.Name) - } - }() - } - - set.scaleSet = sSet - c.ScaleSets[sSet.ID] = set - // We let the watcher in the scale set worker handle the update operation. - return nil -} - -func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { - var entityGetter params.EntityGetter - var ok bool - switch c.Entity.EntityType { - case params.ForgeEntityTypeRepository: - entityGetter, ok = event.Payload.(params.Repository) - case params.ForgeEntityTypeOrganization: - entityGetter, ok = event.Payload.(params.Organization) - case params.ForgeEntityTypeEnterprise: - entityGetter, ok = event.Payload.(params.Enterprise) - } - if !ok { - slog.ErrorContext(c.ctx, "invalid entity payload for entity type", "entity_type", event.EntityType, "payload", event) - return - } - - entity, err := entityGetter.GetEntity() - if err != nil { - slog.ErrorContext(c.ctx, "invalid GitHub entity payload for entity type", "entity_type", event.EntityType, "payload", event) - return - } - - switch event.Operation { - case dbCommon.UpdateOperation: - slog.DebugContext(c.ctx, "got update operation") - c.mux.Lock() - defer c.mux.Unlock() - c.Entity = entity - default: - slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) - return - } -} diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go deleted file mode 100644 index b8acfceb..00000000 --- a/workers/scaleset/interfaces.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
-package scaleset - -import ( - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/util/github/scalesets" -) - -type scaleSetHelper interface { - GetScaleSet() params.ScaleSet - GetScaleSetClient() (*scalesets.ScaleSetClient, error) - SetLastMessageID(id int64) error - SetDesiredRunnerCount(count int) error - Owner() string - HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error - HandleJobsStarted(jobs []params.ScaleSetJobMessage) error - HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error -} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go deleted file mode 100644 index 8c0abefa..00000000 --- a/workers/scaleset/scaleset.go +++ /dev/null @@ -1,1066 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. -package scaleset - -import ( - "context" - "errors" - "fmt" - "log/slog" - "sync" - "time" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm-provider-common/util" - "github.com/cloudbase/garm/cache" - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/locking" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" -) - -func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider) (*Worker, error) { - consumerID := fmt.Sprintf("scaleset-worker-%s-%d", scaleSet.Name, scaleSet.ID) - controllerInfo, err := store.ControllerInfo() - if err != nil { - return nil, fmt.Errorf("getting controller info: %w", err) - } - return &Worker{ - ctx: ctx, - controllerInfo: controllerInfo, - consumerID: consumerID, - store: store, - provider: provider, - scaleSet: scaleSet, - runners: make(map[string]params.Instance), - }, nil -} - -type Worker struct { - ctx context.Context - consumerID string - controllerInfo params.ControllerInfo - - provider common.Provider - store dbCommon.Store - scaleSet params.ScaleSet - runners map[string]params.Instance - - consumer dbCommon.Consumer - - listener *scaleSetListener - - mux sync.Mutex - running bool - quit chan struct{} -} - -func (w *Worker) ensureScaleSetInGitHub() error { - entity, err := w.scaleSet.GetEntity() - if err != nil { - return fmt.Errorf("failed to get entity: %w", err) - } - cli, err := w.GetScaleSetClient() - if err != nil { - return fmt.Errorf("failed to get scaleset client: %w", err) - } - - ghCli, err := cli.GetGithubClient() - if err != nil { - return fmt.Errorf("failed to get github client: %w", err) - } - - rgID, err := ghCli.GetEntityRunnerGroupIDByName(w.ctx, w.scaleSet.GitHubRunnerGroup) - if err != nil { - return fmt.Errorf("failed to get github runner group for entity %s: %w", entity.ID, err) - } - scaleSet, err := cli.GetRunnerScaleSetByNameAndRunnerGroup(w.ctx, int(rgID), w.scaleSet.Name) - if err == nil { - // The scale set exists - if scaleSet.ID != w.scaleSet.ScaleSetID { - 
// The scale set exists in github, but the ID differs from what we know to be true. - // It is possible that the scale set is being managed by some other auto scaler. - // We error here, as there is no way to listen on a scale set that already has a listener - // or is being managed by something else. - return fmt.Errorf("scale set already exists in github and it differs from the ID we know (github: %d vs local: %d)", scaleSet.ID, w.scaleSet.ScaleSetID) - } - return nil - } - if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("failed to get scale set: %w", err) - } - - createScaleSetParams := ¶ms.RunnerScaleSet{ - Name: w.scaleSet.Name, - RunnerGroupID: rgID, - Labels: []params.Label{ - { - Name: w.scaleSet.Name, - Type: "System", - }, - }, - RunnerSetting: params.RunnerSetting{ - Ephemeral: true, - DisableUpdate: w.scaleSet.DisableUpdate, - }, - Enabled: &w.scaleSet.Enabled, - } - runnerScaleSet, err := cli.CreateRunnerScaleSet(w.ctx, createScaleSetParams) - if err != nil { - return fmt.Errorf("error creating runner scale set: %w", err) - } - - // update the DB scale set - updateParams := params.UpdateScaleSetParams{ - ScaleSetID: runnerScaleSet.ID, - } - _, err = w.store.UpdateEntityScaleSet(w.ctx, entity, w.scaleSet.ID, updateParams, nil) - if err != nil { - return fmt.Errorf("failed to update scale set: %w", err) - } - w.scaleSet.ScaleSetID = runnerScaleSet.ID - - return nil -} - -func (w *Worker) Stop() error { - slog.DebugContext(w.ctx, "stopping scale set worker", "scale_set", w.consumerID) - w.mux.Lock() - defer w.mux.Unlock() - - if !w.running { - return nil - } - - w.consumer.Close() - w.running = false - if w.quit != nil { - close(w.quit) - } - w.listener.Stop() - return nil -} - -func (w *Worker) IsRunning() bool { - w.mux.Lock() - defer w.mux.Unlock() - - return w.running -} - -func (w *Worker) Start() (err error) { - slog.DebugContext(w.ctx, "starting scale set worker", "scale_set", w.consumerID) - w.mux.Lock() - defer w.mux.Unlock() - - if w.running { - return nil - } - - instances, err := w.store.ListScaleSetInstances(w.ctx, w.scaleSet.ID) - if err != nil { - return fmt.Errorf("listing scale set instances: %w", err) - } - - for _, instance := range instances { - switch instance.Status { - case commonParams.InstanceCreating: - // We're just starting up. We found an instance stuck in creating. - // When a provider creates an instance, it sets the db instance to - // creating and then issues an API call to the IaaS to create the - // instance using some userdata it needs to come up. But the instance - // will still need to call back home to fetch aditional metadata and - // complete its setup. We should remove the instance as it is not - // possible to reliably determine the state of the instance (if it's in - // mid boot before it reached the phase where it runs the metadtata, or - // if it already failed). - instanceState := commonParams.InstancePendingDelete - locking.Lock(instance.Name, w.consumerID) - if instance.AgentID != 0 { - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) - return fmt.Errorf("getting scale set client: %w", err) - } - if err := scaleSetCli.RemoveRunner(w.ctx, instance.AgentID); err != nil { - // scale sets use JIT runners. This means that we create the runner in github - // before we create the actual instance that will use the credentials. We need - // to remove the runner from github if it exists. 
- if !errors.Is(err, runnerErrors.ErrNotFound) { - if errors.Is(err, runnerErrors.ErrUnauthorized) { - // we don't have access to remove the runner. This implies that our - // credentials may have expired or ar incorrect. - // - // nolint:golangci-lint,godox - // TODO(gabriel-samfira): we need to set the scale set as inactive and stop the listener (if any). - slog.ErrorContext(w.ctx, "error removing runner", "runner_name", instance.Name, "error", err) - w.runners[instance.ID] = instance - locking.Unlock(instance.Name, false) - continue - } - // The runner may have come up, registered and is currently running a - // job, in which case, github will not allow us to remove it. - runnerInstance, err := scaleSetCli.GetRunner(w.ctx, instance.AgentID) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - // We could not get info about the runner and it wasn't not found - slog.ErrorContext(w.ctx, "error getting runner details", "error", err) - w.runners[instance.ID] = instance - locking.Unlock(instance.Name, false) - continue - } - } - if runnerInstance.Status == string(params.RunnerIdle) || - runnerInstance.Status == string(params.RunnerActive) { - // This is a highly unlikely scenario, but let's account for it anyway. - // - // The runner is running a job or is idle. Mark it as running, as - // it appears that it finished booting and is now running. - // - // NOTE: if the instance was in creating and it managed to boot, there - // is a high chance that the we do not have a provider ID for the runner - // inside our database. When removing the runner, the provider will attempt - // to use the instance name instead of the provider ID, the same as when - // creation of the instance fails and we try to clean up any lingering resources - // in the provider. - slog.DebugContext(w.ctx, "runner is running a job or is idle; not removing", "runner_name", instance.Name) - instanceState = commonParams.InstanceRunning - } - } - } - } - runnerUpdateParams := params.UpdateInstanceParams{ - Status: instanceState, - } - instance, err = w.store.UpdateInstance(w.ctx, instance.Name, runnerUpdateParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - locking.Unlock(instance.Name, false) - return fmt.Errorf("updating runner %s: %w", instance.Name, err) - } - } - case commonParams.InstanceDeleting: - // Set the instance in deleting. It is assumed that the runner was already - // removed from github either by github or by garm. Deleting status indicates - // that it was already being handled by the provider. There should be no entry on - // github for the runner if that was the case. - // Setting it in pending_delete will cause the provider to try again, an operation - // which is idempotent (if it's already deleted, the provider reports success). 
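The idempotency this comment relies on can be summarized in a few lines: a delete that finds nothing reports success, so retrying a half-finished removal always converges. A small sketch with a hypothetical provider type (only the "not found counts as success" convention is taken from the code around it):

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

type provider struct{ instances map[string]bool }

func (p *provider) deleteInstance(name string) error {
	if !p.instances[name] {
		return errNotFound
	}
	delete(p.instances, name)
	return nil
}

// remove is safe to retry: an instance that is already gone counts as a
// successful delete, so repeated attempts converge instead of failing.
func remove(p *provider, name string) error {
	if err := p.deleteInstance(name); err != nil && !errors.Is(err, errNotFound) {
		return err
	}
	return nil
}

func main() {
	p := &provider{instances: map[string]bool{"runner-1": true}}
	fmt.Println(remove(p, "runner-1")) // nil: removed
	fmt.Println(remove(p, "runner-1")) // nil: already gone, still success
}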
- runnerUpdateParams := params.UpdateInstanceParams{ - Status: commonParams.InstancePendingDelete, - } - instance, err = w.store.UpdateInstance(w.ctx, instance.Name, runnerUpdateParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - locking.Unlock(instance.Name, false) - return fmt.Errorf("updating runner %s: %w", instance.Name, err) - } - } - case commonParams.InstanceDeleted: - if err := w.handleInstanceCleanup(instance); err != nil { - locking.Unlock(instance.Name, false) - return fmt.Errorf("failed to remove database entry for %s: %w", instance.Name, err) - } - continue - } - w.runners[instance.ID] = instance - locking.Unlock(instance.Name, false) - } - - if err := w.ensureScaleSetInGitHub(); err != nil { - return fmt.Errorf("failed to ensure scale set: %w", err) - } - - consumer, err := watcher.RegisterConsumer( - w.ctx, w.consumerID, - watcher.WithAny( - watcher.WithAll( - watcher.WithScaleSetFilter(w.scaleSet), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), - watcher.WithScaleSetInstanceFilter(w.scaleSet), - ), - ) - if err != nil { - return fmt.Errorf("error registering consumer: %w", err) - } - defer func() { - if err != nil { - consumer.Close() - } - }() - - slog.DebugContext(w.ctx, "creating scale set listener") - listener := newListener(w.ctx, w) - - if w.scaleSet.Enabled { - slog.DebugContext(w.ctx, "starting scale set listener") - if err := listener.Start(); err != nil { - return fmt.Errorf("error starting listener: %w", err) - } - } else { - slog.InfoContext(w.ctx, "scale set is disabled; not starting listener") - } - - w.listener = listener - w.consumer = consumer - w.running = true - w.quit = make(chan struct{}) - - slog.DebugContext(w.ctx, "starting scale set worker loops", "scale_set", w.consumerID) - go w.loop() - go w.keepListenerAlive() - go w.handleAutoScale() - return nil -} - -func (w *Worker) runnerByName() map[string]params.Instance { - runners := make(map[string]params.Instance) - for _, runner := range w.runners { - runners[runner.Name] = runner - } - return runners -} - -func (w *Worker) setRunnerDBStatus(runner string, status commonParams.InstanceStatus) (params.Instance, error) { - updateParams := params.UpdateInstanceParams{ - Status: status, - } - newDbInstance, err := w.store.UpdateInstance(w.ctx, runner, updateParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Instance{}, fmt.Errorf("updating runner %s: %w", runner, err) - } - } - return newDbInstance, nil -} - -func (w *Worker) removeRunnerFromGithubAndSetPendingDelete(runnerName string, agentID int64) error { - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - return fmt.Errorf("getting scale set client: %w", err) - } - if err := scaleSetCli.RemoveRunner(w.ctx, agentID); err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("removing runner %s: %w", runnerName, err) - } - } - instance, err := w.setRunnerDBStatus(runnerName, commonParams.InstancePendingDelete) - if err != nil { - return fmt.Errorf("updating runner %s: %w", instance.Name, err) - } - w.runners[instance.ID] = instance - return nil -} - -func (w *Worker) reapTimedOutRunners(runners map[string]params.RunnerReference) (func(), error) { - lockNames := []string{} - - unlockFn := func() { - for _, name := range lockNames { - slog.DebugContext(w.ctx, "unlockFn unlocking runner", "runner_name", name) - locking.Unlock(name, false) - } - } - - for _, runner := range w.runners { - if time.Since(runner.UpdatedAt).Minutes() < 
float64(w.scaleSet.RunnerBootstrapTimeout) { - continue - } - switch runner.Status { - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, - commonParams.InstanceDeleting, commonParams.InstanceDeleted: - continue - } - - if runner.RunnerStatus != params.RunnerPending && runner.RunnerStatus != params.RunnerInstalling { - slog.DebugContext(w.ctx, "runner is not pending or installing; skipping", "runner_name", runner.Name) - continue - } - if ghRunner, ok := runners[runner.Name]; !ok || ghRunner.GetStatus() == params.RunnerOffline { - if ok := locking.TryLock(runner.Name, w.consumerID); !ok { - slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) - continue - } - lockNames = append(lockNames, runner.Name) - - slog.InfoContext( - w.ctx, "reaping timed-out/failed runner", - "runner_name", runner.Name) - - if err := w.removeRunnerFromGithubAndSetPendingDelete(runner.Name, runner.AgentID); err != nil { - slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) - unlockFn() - return nil, fmt.Errorf("removing runner %s: %w", runner.Name, err) - } - } - } - return unlockFn, nil -} - -func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error { - w.mux.Lock() - defer w.mux.Unlock() - - ghRunnersByName := make(map[string]params.RunnerReference) - for _, runner := range runners { - ghRunnersByName[runner.Name] = runner - } - - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - return fmt.Errorf("getting scale set client: %w", err) - } - dbRunnersByName := w.runnerByName() - // Cross check what exists in github with what we have in the database. - for name, runner := range ghRunnersByName { - status := runner.GetStatus() - if _, ok := dbRunnersByName[name]; !ok { - // The runner exists in github but not in our database. If it is idle - // or active, it may be managed by something else, so we leave it alone - // and only remove runners that are in neither state. - if status != params.RunnerIdle && status != params.RunnerActive { - slog.InfoContext(w.ctx, "runner does not exist in GARM; removing from github", "runner_name", name) - if err := scaleSetCli.RemoveRunner(w.ctx, runner.ID); err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - continue - } - slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) - } - } - continue - } - } - - unlockFn, err := w.reapTimedOutRunners(ghRunnersByName) - if err != nil { - return fmt.Errorf("reaping timed out runners: %w", err) - } - defer unlockFn() - - // refresh the map. It may have been mutated above. - dbRunnersByName = w.runnerByName() - // Cross check what exists in the database with what we have in github. - for name, runner := range dbRunnersByName { - // in the case of scale sets, JIT configs are used. There is no situation - // in which we create a runner in the DB and one does not exist in github. - // We can safely assume that if the runner is not in github anymore, it can - // be removed from the provider and the DB. - switch runner.Status { - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, - commonParams.InstanceDeleting, commonParams.InstanceDeleted: - continue - } - - if _, ok := ghRunnersByName[name]; !ok { - if ok := locking.TryLock(name, w.consumerID); !ok { - slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", name) - continue - } - // unlock the runner only after this function returns. This function also cross - // checks between the provider and the database, and removes leftover runners.
- // If we unlock early, the provider worker will attempt to remove runners that - // we set in pending_delete. This function holds the mutex, so we won't see those - // changes until we return. So we hold the instance lock here until we are done. - // That way, even if the provider sees the pending_delete status, it won't act on - // it until it manages to lock the instance. - defer locking.Unlock(name, false) - - slog.InfoContext(w.ctx, "runner does not exist in github; removing from provider", "runner_name", name) - instance, err := w.setRunnerDBStatus(runner.Name, commonParams.InstancePendingDelete) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("updating runner %s: %w", instance.Name, err) - } - } - // We will get an update event anyway from the watcher, but updating the runner - // here will prevent race conditions if some other event is already in the queue - // which involves this runner. For the lifetime of this function, we - // hold the lock, so no race condition can occur. - w.runners[runner.ID] = instance - } - } - - // Cross check what exists in the provider with the DB. - pseudoPoolID, err := w.pseudoPoolID() - if err != nil { - return fmt.Errorf("getting pseudo pool ID: %w", err) - } - listParams := common.ListInstancesParams{ - ListInstancesV011: common.ListInstancesV011Params{ - ProviderBaseParams: common.ProviderBaseParams{ - ControllerInfo: w.controllerInfo, - }, - }, - } - - providerRunners, err := w.provider.ListInstances(w.ctx, pseudoPoolID, listParams) - if err != nil { - return fmt.Errorf("listing instances: %w", err) - } - - providerRunnersByName := make(map[string]commonParams.ProviderInstance) - for _, runner := range providerRunners { - providerRunnersByName[runner.Name] = runner - } - - deleteInstanceParams := common.DeleteInstanceParams{ - DeleteInstanceV011: common.DeleteInstanceV011Params{ - ProviderBaseParams: common.ProviderBaseParams{ - ControllerInfo: w.controllerInfo, - }, - }, - } - - // refresh the map. It may have been mutated above. - dbRunnersByName = w.runnerByName() - for _, runner := range providerRunners { - if _, ok := dbRunnersByName[runner.Name]; !ok { - slog.InfoContext(w.ctx, "runner does not exist in database; removing from provider", "runner_name", runner.Name) - // There is no situation in which the runner will disappear from the provider - // after it was removed from the database. The provider worker will remove the - // instance from the provider and mark the instance as deleted in the database. - // It is the responsibility of the scaleset worker to then clean up the runners - // in the deleted state. - // That means that if we have a runner in the provider but not the DB, it is most - // likely an inconsistency. - if err := w.provider.DeleteInstance(w.ctx, runner.Name, deleteInstanceParams); err != nil { - slog.ErrorContext(w.ctx, "error removing instance", "instance_name", runner.Name, "error", err) - } - continue - } - } - - for _, runner := range dbRunnersByName { - switch runner.Status { - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, - commonParams.InstanceDeleting, commonParams.InstanceDeleted: - // This instance is already being deleted.
- continue - } - - locked := locking.TryLock(runner.Name, w.consumerID) - if !locked { - slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) - continue - } - defer locking.Unlock(runner.Name, false) - - if _, ok := providerRunnersByName[runner.Name]; !ok { - // The runner is not in the provider anymore. Remove it from the DB. - slog.InfoContext(w.ctx, "runner does not exist in provider; removing from database", "runner_name", runner.Name) - if err := w.removeRunnerFromGithubAndSetPendingDelete(runner.Name, runner.AgentID); err != nil { - return fmt.Errorf("removing runner %s: %w", runner.Name, err) - } - } - } - - return nil -} - -func (w *Worker) pseudoPoolID() (string, error) { - // This is temporary. We need to extend providers to know about scale sets. - entity, err := w.scaleSet.GetEntity() - if err != nil { - return "", fmt.Errorf("getting entity: %w", err) - } - return fmt.Sprintf("%s-%s", w.scaleSet.Name, entity.ID), nil -} - -func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { - scaleSet, ok := event.Payload.(params.ScaleSet) - if !ok { - slog.ErrorContext(w.ctx, "invalid payload for scale set type", "scale_set_type", event.EntityType, "payload", event.Payload) - return - } - switch event.Operation { - case dbCommon.UpdateOperation: - slog.DebugContext(w.ctx, "got update operation") - w.mux.Lock() - - if scaleSet.MaxRunners < w.scaleSet.MaxRunners || !scaleSet.Enabled { - // we stop the listener if the scale set is disabled or if the max runners - // is decreased. In the case where max runners changes but the scale set - // is still enabled, we rely on keepListenerAlive to restart the listener - // which will listen for new messages with the changed max runners. This way - // we don't have to potentially wait for 50 seconds for the max runner value - // to be updated, during which time we might get more runners spawned than the - // new max runner value.
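The comment above describes a stop-to-reload pattern: new settings are not pushed into a running listener; instead the listener is stopped and a keepalive loop restarts it with the latest configuration. A rough standalone sketch of that shape (all names here are illustrative, not GARM's):

    package main

    import (
        "context"
        "fmt"
        "sync"
        "time"
    )

    // supervisor restarts its worker whenever it stops, reading the latest
    // config on each restart. Stopping the worker on a config change is
    // therefore enough to apply new settings promptly.
    type supervisor struct {
        mu         sync.Mutex
        maxRunners int
        cancel     context.CancelFunc
    }

    func (s *supervisor) updateConfig(maxRunners int) {
        s.mu.Lock()
        defer s.mu.Unlock()
        s.maxRunners = maxRunners
        if s.cancel != nil {
            s.cancel() // stop the current worker; run() restarts it with the new value
        }
    }

    func (s *supervisor) run(ctx context.Context) {
        for ctx.Err() == nil {
            s.mu.Lock()
            workerCtx, cancel := context.WithCancel(ctx)
            s.cancel = cancel // kept so updateConfig can stop this iteration
            max := s.maxRunners
            s.mu.Unlock()

            fmt.Println("worker started with maxRunners =", max)
            <-workerCtx.Done() // the real worker long-polls here
        }
    }

    func main() {
        ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
        defer cancel()
        s := &supervisor{maxRunners: 10}
        go s.run(ctx)
        time.Sleep(20 * time.Millisecond)
        s.updateConfig(5) // triggers an immediate restart
        time.Sleep(50 * time.Millisecond)
    }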
- if err := w.listener.Stop(); err != nil { - slog.ErrorContext(w.ctx, "error stopping listener", "error", err) - } - } - w.scaleSet = scaleSet - w.mux.Unlock() - default: - slog.DebugContext(w.ctx, "invalid operation type; ignoring", "operation_type", event.Operation) - } -} - -func (w *Worker) handleInstanceCleanup(instance params.Instance) error { - if instance.Status == commonParams.InstanceDeleted { - if err := w.store.DeleteInstanceByName(w.ctx, instance.Name); err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - return fmt.Errorf("deleting instance %s: %w", instance.ID, err) - } - } - } - return nil -} - -func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { - instance, ok := event.Payload.(params.Instance) - if !ok { - slog.ErrorContext(w.ctx, "invalid payload for instance type", "instance_type", event.EntityType, "payload", event.Payload) - return - } - switch event.Operation { - case dbCommon.CreateOperation: - slog.DebugContext(w.ctx, "got create operation") - w.mux.Lock() - w.runners[instance.ID] = instance - w.mux.Unlock() - case dbCommon.UpdateOperation: - slog.DebugContext(w.ctx, "got update operation") - w.mux.Lock() - if instance.Status == commonParams.InstanceDeleted { - if err := w.handleInstanceCleanup(instance); err != nil { - slog.ErrorContext(w.ctx, "error cleaning up instance", "instance_id", instance.ID, "error", err) - } - w.mux.Unlock() - return - } - oldInstance, ok := w.runners[instance.ID] - w.runners[instance.ID] = instance - - if !ok { - slog.DebugContext(w.ctx, "instance not found in local cache; ignoring", "instance_id", instance.ID) - w.mux.Unlock() - return - } - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) - w.mux.Unlock() - return - } - if oldInstance.RunnerStatus != instance.RunnerStatus && instance.RunnerStatus == params.RunnerIdle { - serviceRunner, err := scaleSetCli.GetRunner(w.ctx, instance.AgentID) - if err != nil { - slog.ErrorContext(w.ctx, "error getting runner details", "error", err) - w.mux.Unlock() - return - } - status, ok := serviceRunner.Status.(string) - if !ok { - slog.ErrorContext(w.ctx, "error getting runner status", "runner_id", instance.AgentID) - w.mux.Unlock() - return - } - if status != string(params.RunnerIdle) && status != string(params.RunnerActive) { - // nolint:golangci-lint,godox - // TODO: Wait for the status to change for a while (30 seconds?). Mark the instance as - // pending_delete if the runner never comes online.
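handleInstanceEntityEvent above follows a common shape: type-assert the opaque event payload, then mutate a mutex-guarded map that serves as the worker's local cache. With this many early returns, releasing the mutex on every path is the error-prone part; a deferred unlock makes that property structural. A minimal sketch with illustrative types (not GARM's actual event or instance structs):

    package main

    import (
        "fmt"
        "sync"
    )

    type event struct {
        op      string // "create", "update", "delete"
        payload any
    }

    type instance struct {
        ID     string
        Status string
    }

    type worker struct {
        mu      sync.Mutex
        runners map[string]instance // local cache keyed by instance ID
    }

    func (w *worker) handle(ev event) {
        inst, ok := ev.payload.(instance)
        if !ok {
            fmt.Println("invalid payload; ignoring")
            return
        }
        w.mu.Lock()
        defer w.mu.Unlock() // released on every return path, early or not
        switch ev.op {
        case "create", "update":
            w.runners[inst.ID] = inst
        case "delete":
            delete(w.runners, inst.ID)
        }
    }

    func main() {
        w := &worker{runners: map[string]instance{}}
        w.handle(event{op: "create", payload: instance{ID: "1", Status: "running"}})
        w.handle(event{op: "delete", payload: instance{ID: "1"}})
        fmt.Println(len(w.runners)) // 0
    }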
- w.mux.Unlock() - return - } - } - w.mux.Unlock() - case dbCommon.DeleteOperation: - slog.DebugContext(w.ctx, "got delete operation") - w.mux.Lock() - delete(w.runners, instance.ID) - w.mux.Unlock() - default: - slog.DebugContext(w.ctx, "invalid operation type; ignoring", "operation_type", event.Operation) - } -} - -func (w *Worker) handleEvent(event dbCommon.ChangePayload) { - switch event.EntityType { - case dbCommon.ScaleSetEntityType: - slog.DebugContext(w.ctx, "got scaleset event") - w.handleScaleSetEvent(event) - case dbCommon.InstanceEntityType: - slog.DebugContext(w.ctx, "got instance event") - w.handleInstanceEntityEvent(event) - default: - slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType) - } -} - -func (w *Worker) loop() { - defer w.Stop() - - for { - select { - case <-w.quit: - return - case event, ok := <-w.consumer.Watch(): - if !ok { - slog.InfoContext(w.ctx, "consumer channel closed") - return - } - go w.handleEvent(event) - case <-w.ctx.Done(): - slog.DebugContext(w.ctx, "context done") - return - } - } -} - -func (w *Worker) sleepWithCancel(sleepTime time.Duration) (canceled bool) { - if sleepTime == 0 { - return false - } - ticker := time.NewTicker(sleepTime) - defer ticker.Stop() - - select { - case <-ticker.C: - return false - case <-w.quit: - case <-w.ctx.Done(): - } - return true -} - -func (w *Worker) sessionLoopMayRun() bool { - w.mux.Lock() - defer w.mux.Unlock() - return w.scaleSet.Enabled -} - -func (w *Worker) keepListenerAlive() { - var backoff time.Duration -Loop: - for { - if !w.sessionLoopMayRun() { - if canceled := w.sleepWithCancel(2 * time.Second); canceled { - slog.InfoContext(w.ctx, "worker is stopped; exiting keepListenerAlive") - return - } - continue - } - // noop if already started. - if err := w.listener.Start(); err != nil { - slog.ErrorContext(w.ctx, "error starting listener", "error", err, "consumer_id", w.consumerID) - if canceled := w.sleepWithCancel(2 * time.Second); canceled { - slog.InfoContext(w.ctx, "worker is stopped; exiting keepListenerAlive") - return - } - // we failed to start the listener. Try again. 
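sleepWithCancel above reaches for time.NewTicker even though only a single tick is ever consumed; time.NewTimer is the more direct one-shot primitive and behaves the same here. An equivalent standalone sketch, with the quit channel and context passed in explicitly rather than read off the worker:

    package main

    import (
        "context"
        "fmt"
        "time"
    )

    // sleepWithCancel waits for d, returning true if the wait was cut short
    // by quit or by context cancellation.
    func sleepWithCancel(ctx context.Context, quit <-chan struct{}, d time.Duration) (canceled bool) {
        if d == 0 {
            return false
        }
        t := time.NewTimer(d)
        defer t.Stop()
        select {
        case <-t.C:
            return false
        case <-quit:
        case <-ctx.Done():
        }
        return true
    }

    func main() {
        ctx, cancel := context.WithCancel(context.Background())
        go func() { time.Sleep(10 * time.Millisecond); cancel() }()
        fmt.Println(sleepWithCancel(ctx, nil, time.Second)) // true: canceled early
    }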
- continue - } - - select { - case <-w.quit: - return - case <-w.ctx.Done(): - return - case <-w.listener.Wait(): - slog.DebugContext(w.ctx, "listener is stopped; attempting to restart") - w.mux.Lock() - if !w.scaleSet.Enabled { - w.listener.Stop() // cleanup - w.mux.Unlock() - continue Loop - } - w.mux.Unlock() - for { - w.mux.Lock() - w.listener.Stop() // cleanup - if !w.scaleSet.Enabled { - w.mux.Unlock() - continue Loop - } - slog.DebugContext(w.ctx, "attempting to restart") - if err := w.listener.Start(); err != nil { - w.mux.Unlock() - slog.ErrorContext(w.ctx, "error restarting listener", "error", err) - switch { - case backoff > 60*time.Second: - backoff = 60 * time.Second - case backoff == 0: - backoff = 5 * time.Second - slog.InfoContext(w.ctx, "backing off restart attempt", "backoff", backoff) - default: - backoff *= 2 - } - slog.ErrorContext(w.ctx, "error restarting listener", "error", err, "backoff", backoff) - if canceled := w.sleepWithCancel(backoff); canceled { - slog.DebugContext(w.ctx, "listener restart canceled") - return - } - continue - } - w.mux.Unlock() - continue Loop - } - } - } -} - -func (w *Worker) handleScaleUp(target, current uint) { - if !w.scaleSet.Enabled { - slog.DebugContext(w.ctx, "scale set is disabled; not scaling up") - return - } - - if target <= current { - slog.DebugContext(w.ctx, "target is less than or equal to current; not scaling up") - return - } - - controllerConfig, err := w.store.ControllerInfo() - if err != nil { - slog.ErrorContext(w.ctx, "error getting controller config", "error", err) - return - } - - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) - return - } - for i := current; i < target; i++ { - newRunnerName := fmt.Sprintf("%s-%s", w.scaleSet.GetRunnerPrefix(), util.NewID()) - jitConfig, err := scaleSetCli.GenerateJitRunnerConfig(w.ctx, newRunnerName, w.scaleSet.ScaleSetID) - if err != nil { - slog.ErrorContext(w.ctx, "error generating jit config", "error", err) - continue - } - slog.DebugContext(w.ctx, "creating new runner", "runner_name", newRunnerName) - decodedJit, err := jitConfig.DecodedJITConfig() - if err != nil { - slog.ErrorContext(w.ctx, "error decoding jit config", "error", err) - continue - } - runnerParams := params.CreateInstanceParams{ - Name: newRunnerName, - Status: commonParams.InstancePendingCreate, - RunnerStatus: params.RunnerPending, - OSArch: w.scaleSet.OSArch, - OSType: w.scaleSet.OSType, - CallbackURL: controllerConfig.CallbackURL, - MetadataURL: controllerConfig.MetadataURL, - CreateAttempt: 1, - GitHubRunnerGroup: w.scaleSet.GitHubRunnerGroup, - JitConfiguration: decodedJit, - AgentID: jitConfig.Runner.ID, - } - - dbInstance, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams) - if err != nil { - slog.ErrorContext(w.ctx, "error creating instance", "error", err) - if err := scaleSetCli.RemoveRunner(w.ctx, jitConfig.Runner.ID); err != nil { - slog.ErrorContext(w.ctx, "error deleting runner", "error", err) - } - continue - } - w.runners[dbInstance.ID] = dbInstance - - _, err = scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) - if err != nil { - slog.ErrorContext(w.ctx, "error getting runner details", "error", err) - continue - } - } -} - -func (w *Worker) waitForToolsOrCancel() (hasTools, stopped bool) { - ticker := time.NewTicker(1 * time.Second) - defer ticker.Stop() - select { - case <-ticker.C: - entity, err := w.scaleSet.GetEntity() - if err != nil { - slog.ErrorContext(w.ctx, "error getting 
entity", "error", err) - } - if _, err := cache.GetGithubToolsCache(entity.ID); err != nil { - slog.DebugContext(w.ctx, "tools not found in cache; waiting for tools") - return false, false - } - return true, false - case <-w.quit: - return false, true - case <-w.ctx.Done(): - return false, true - } -} - -func (w *Worker) handleScaleDown(target, current uint) { - delta := current - target - if delta <= 0 { - return - } - removed := 0 - candidates := []params.Instance{} - for _, runner := range w.runners { - locked := locking.TryLock(runner.Name, w.consumerID) - if !locked { - slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) - continue - } - switch runner.Status { - case commonParams.InstanceRunning: - if runner.RunnerStatus != params.RunnerActive { - candidates = append(candidates, runner) - } - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, - commonParams.InstanceDeleting, commonParams.InstanceDeleted: - removed++ - locking.Unlock(runner.Name, true) - continue - default: - slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.Status) - locking.Unlock(runner.Name, false) - continue - } - locking.Unlock(runner.Name, false) - } - - if removed >= int(delta) { - return - } - - for _, runner := range candidates { - if removed >= int(delta) { - break - } - - locked := locking.TryLock(runner.Name, w.consumerID) - if !locked { - slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) - continue - } - - switch runner.Status { - case commonParams.InstancePendingCreate, commonParams.InstanceRunning: - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, - commonParams.InstanceDeleting, commonParams.InstanceDeleted: - removed++ - locking.Unlock(runner.Name, true) - continue - default: - slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.Status) - locking.Unlock(runner.Name, false) - continue - } - - switch runner.RunnerStatus { - case params.RunnerTerminated, params.RunnerActive: - slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.RunnerStatus) - locking.Unlock(runner.Name, false) - continue - } - - scaleSetCli, err := w.GetScaleSetClient() - if err != nil { - slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) - return - } - slog.DebugContext(w.ctx, "removing runner", "runner_name", runner.Name) - if err := scaleSetCli.RemoveRunner(w.ctx, runner.AgentID); err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) - locking.Unlock(runner.Name, false) - continue - } - } - runnerUpdateParams := params.UpdateInstanceParams{ - Status: commonParams.InstancePendingDelete, - } - if _, err := w.store.UpdateInstance(w.ctx, runner.Name, runnerUpdateParams); err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - // The error seems to be that the instance was removed from the database. We still had it in our - // state, so either the update never came from the watcher or something else happened. - // Remove it from the local cache. - delete(w.runners, runner.ID) - removed++ - locking.Unlock(runner.Name, true) - continue - } - // nolint:golangci-lint,godox - // TODO: This should not happen, unless there is some issue with the database. 
- // The UpdateInstance() function should add tenacity, but even in that case, if it - // still errors out, we need to handle it somehow. - slog.ErrorContext(w.ctx, "error updating runner", "runner_name", runner.Name, "error", err) - locking.Unlock(runner.Name, false) - continue - } - removed++ - locking.Unlock(runner.Name, false) - } -} - -func (w *Worker) handleAutoScale() { - ticker := time.NewTicker(5 * time.Second) - defer ticker.Stop() - - lastMsg := "" - lastMsgDebugLog := func(msg string, targetRunners, currentRunners uint) { - if lastMsg != msg { - slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners) - lastMsg = msg - } - } - - for { - hasTools, stopped := w.waitForToolsOrCancel() - if stopped { - slog.DebugContext(w.ctx, "worker is stopped; exiting handleAutoScale") - return - } - - if !hasTools { - w.sleepWithCancel(1 * time.Second) - continue - } - - select { - case <-w.quit: - return - case <-w.ctx.Done(): - return - case <-ticker.C: - w.mux.Lock() - for _, instance := range w.runners { - if err := w.handleInstanceCleanup(instance); err != nil { - slog.ErrorContext(w.ctx, "error cleaning up instance", "instance_id", instance.ID, "error", err) - } - } - var desiredRunners uint - if w.scaleSet.DesiredRunnerCount > 0 { - desiredRunners = uint(w.scaleSet.DesiredRunnerCount) - } - targetRunners := min(w.scaleSet.MinIdleRunners+desiredRunners, w.scaleSet.MaxRunners) - - currentRunners := uint(len(w.runners)) - if currentRunners == targetRunners { - lastMsgDebugLog("desired runner count reached", targetRunners, currentRunners) - w.mux.Unlock() - continue - } - - if currentRunners < targetRunners { - lastMsgDebugLog("scaling up", targetRunners, currentRunners) - w.handleScaleUp(targetRunners, currentRunners) - } else { - lastMsgDebugLog("attempting to scale down", targetRunners, currentRunners) - w.handleScaleDown(targetRunners, currentRunners) - } - w.mux.Unlock() - } - } -} diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go deleted file mode 100644 index c04c92a2..00000000 --- a/workers/scaleset/scaleset_helper.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
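The autoscaling loop above computes its target as the job-driven desired count plus the configured idle floor, clamped to the maximum: targetRunners = min(MinIdleRunners + desiredRunners, MaxRunners). A tiny illustration of the clamp, mirroring the field names from handleAutoScale (the rest is scaffolding; min over uints is the Go 1.21+ builtin):

    package main

    import "fmt"

    // targetRunners keeps minIdle spare runners on top of the assigned-job
    // demand, but never exceeds maxRunners.
    func targetRunners(minIdle, desired, maxRunners uint) uint {
        return min(minIdle+desired, maxRunners)
    }

    func main() {
        fmt.Println(targetRunners(2, 3, 10))  // 5: demand plus idle floor
        fmt.Println(targetRunners(2, 20, 10)) // 10: clamped to the maximum
    }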
-package scaleset - -import ( - "errors" - "fmt" - "log/slog" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/cache" - "github.com/cloudbase/garm/locking" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/util/github/scalesets" -) - -func (w *Worker) GetScaleSetClient() (*scalesets.ScaleSetClient, error) { - scaleSetEntity, err := w.scaleSet.GetEntity() - if err != nil { - return nil, fmt.Errorf("getting entity: %w", err) - } - - ghCli, ok := cache.GetGithubClient(scaleSetEntity.ID) - if !ok { - return nil, fmt.Errorf("github client for entity %s not found in cache", scaleSetEntity.ID) - } - scaleSetClient, err := scalesets.NewClient(ghCli) - if err != nil { - return nil, fmt.Errorf("creating scale set client: %w", err) - } - - return scaleSetClient, nil -} - -func (w *Worker) GetScaleSet() params.ScaleSet { - return w.scaleSet -} - -func (w *Worker) Owner() string { - return fmt.Sprintf("garm-%s", w.controllerInfo.ControllerID) -} - -func (w *Worker) SetLastMessageID(id int64) error { - if err := w.store.SetScaleSetLastMessageID(w.ctx, w.scaleSet.ID, id); err != nil { - return fmt.Errorf("setting last message ID: %w", err) - } - return nil -} - -func (w *Worker) recordOrUpdateJob(job params.ScaleSetJobMessage) error { - entity, err := w.scaleSet.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - asUUID, err := entity.GetIDAsUUID() - if err != nil { - return fmt.Errorf("getting entity ID as UUID: %w", err) - } - - jobParams := job.ToJob() - jobParams.RunnerGroupName = w.scaleSet.GitHubRunnerGroup - - switch entity.EntityType { - case params.ForgeEntityTypeEnterprise: - jobParams.EnterpriseID = &asUUID - case params.ForgeEntityTypeRepository: - jobParams.RepoID = &asUUID - case params.ForgeEntityTypeOrganization: - jobParams.OrgID = &asUUID - default: - return fmt.Errorf("unknown entity type: %s --> %s", entity.EntityType, entity) - } - - if _, jobErr := w.store.CreateOrUpdateJob(w.ctx, jobParams); jobErr != nil { - slog.With(slog.Any("error", jobErr)).ErrorContext( - w.ctx, "failed to update job", "job_id", jobParams.ID) - } - return nil -} - -// HandleJobsCompleted handles job completed messages. If a job had a runner -// assigned and was not canceled before it had a chance to run, then we mark -// that runner as pending_delete. -func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) (err error) { - slog.DebugContext(w.ctx, "handling job completed", "jobs", jobs) - defer slog.DebugContext(w.ctx, "finished handling job completed", "jobs", jobs, "error", err) - - for _, job := range jobs { - if err := w.recordOrUpdateJob(job); err != nil { - // recording scale set jobs is purely informational for now. - slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) - } - - if job.RunnerName == "" { - // This job was not assigned to a runner, so we can skip it. - continue - } - // Set the runner to pending_delete.
- runnerUpdateParams := params.UpdateInstanceParams{ - Status: commonParams.InstancePendingDelete, - RunnerStatus: params.RunnerTerminated, - } - - locking.Lock(job.RunnerName, w.consumerID) - _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, runnerUpdateParams) - if err != nil { - if !errors.Is(err, runnerErrors.ErrNotFound) { - locking.Unlock(job.RunnerName, false) - return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) - } - } - locking.Unlock(job.RunnerName, false) - } - return nil -} - -// HandleJobsStarted updates the runners from idle to active in the DB and -// assigns the job to them. -func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) { - slog.DebugContext(w.ctx, "handling job started", "jobs", jobs) - defer slog.DebugContext(w.ctx, "finished handling job started", "jobs", jobs, "error", err) - for _, job := range jobs { - if err := w.recordOrUpdateJob(job); err != nil { - // recording scale set jobs is purely informational for now. - slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) - } - - if job.RunnerName == "" { - // This should not happen, but just in case. - continue - } - - updateParams := params.UpdateInstanceParams{ - RunnerStatus: params.RunnerActive, - } - - locking.Lock(job.RunnerName, w.consumerID) - _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, updateParams) - if err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - slog.InfoContext(w.ctx, "runner not found; handled by some other controller?", "runner_name", job.RunnerName) - locking.Unlock(job.RunnerName, true) - continue - } - locking.Unlock(job.RunnerName, false) - return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) - } - locking.Unlock(job.RunnerName, false) - } - return nil -} - -func (w *Worker) HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error { - slog.DebugContext(w.ctx, "handling jobs available", "jobs", jobs) - for _, job := range jobs { - if err := w.recordOrUpdateJob(job); err != nil { - // recording scale set jobs is purely informational for now. - slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) - } - } - return nil -} - -func (w *Worker) SetDesiredRunnerCount(count int) error { - if err := w.store.SetScaleSetDesiredRunnerCount(w.ctx, w.scaleSet.ID, count); err != nil { - return fmt.Errorf("setting desired runner count: %w", err) - } - return nil -} diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go deleted file mode 100644 index 7808f9f6..00000000 --- a/workers/scaleset/scaleset_listener.go +++ /dev/null @@ -1,292 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License.
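The handlers above serialize per-runner work through locking.Lock/TryLock/Unlock keyed by runner name. The locking package itself is not part of this diff; a minimal sketch of a name-keyed try-lock conveys the idea (GARM's real implementation may differ, for instance in how it tracks owners or waiters):

    package main

    import (
        "fmt"
        "sync"
    )

    // keyedLock maps names to owners; TryLock fails fast if another owner
    // already holds the name, so workers skip busy runners instead of blocking.
    type keyedLock struct {
        mu     sync.Mutex
        owners map[string]string
    }

    func newKeyedLock() *keyedLock {
        return &keyedLock{owners: map[string]string{}}
    }

    func (k *keyedLock) TryLock(name, owner string) bool {
        k.mu.Lock()
        defer k.mu.Unlock()
        if _, held := k.owners[name]; held {
            return false
        }
        k.owners[name] = owner
        return true
    }

    func (k *keyedLock) Unlock(name string) {
        k.mu.Lock()
        defer k.mu.Unlock()
        delete(k.owners, name)
    }

    func main() {
        l := newKeyedLock()
        fmt.Println(l.TryLock("runner-1", "worker-a")) // true
        fmt.Println(l.TryLock("runner-1", "worker-b")) // false: already held
        l.Unlock("runner-1")
        fmt.Println(l.TryLock("runner-1", "worker-b")) // true
    }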
-package scaleset - -import ( - "context" - "errors" - "fmt" - "log/slog" - "sync" - - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/util/github/scalesets" -) - -var closed = make(chan struct{}) - -func init() { close(closed) } - -func newListener(ctx context.Context, scaleSetHelper scaleSetHelper) *scaleSetListener { - return &scaleSetListener{ - ctx: ctx, - scaleSetHelper: scaleSetHelper, - lastMessageID: scaleSetHelper.GetScaleSet().LastMessageID, - } -} - -type scaleSetListener struct { - // ctx is the global context for the worker - ctx context.Context - // listenerCtx is the context for the listener. We pass this - // context to GetMessages() which blocks until a message is - // available. We need to be able to cancel that longpoll request - // independent of the worker context, in case we need to restart - // the listener without restarting the worker. - listenerCtx context.Context - cancelFunc context.CancelFunc - lastMessageID int64 - - scaleSetHelper scaleSetHelper - messageSession *scalesets.MessageSession - - mux sync.Mutex - running bool - quit chan struct{} - loopExited chan struct{} -} - -func (l *scaleSetListener) Start() error { - slog.DebugContext(l.ctx, "starting scale set listener", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) - l.mux.Lock() - defer l.mux.Unlock() - - if l.running { - return nil - } - - l.listenerCtx, l.cancelFunc = context.WithCancel(context.Background()) - scaleSet := l.scaleSetHelper.GetScaleSet() - scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() - if err != nil { - return fmt.Errorf("getting scale set client: %w", err) - } - slog.DebugContext(l.ctx, "creating new message session", "scale_set", scaleSet.ScaleSetID) - session, err := scaleSetClient.CreateMessageSession( - l.listenerCtx, scaleSet.ScaleSetID, - l.scaleSetHelper.Owner(), - ) - if err != nil { - return fmt.Errorf("creating message session: %w", err) - } - l.messageSession = session - l.quit = make(chan struct{}) - l.running = true - l.loopExited = make(chan struct{}) - go l.loop() - - return nil -} - -func (l *scaleSetListener) Stop() error { - l.mux.Lock() - defer l.mux.Unlock() - - if !l.running { - return nil - } - scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() - if err != nil { - return fmt.Errorf("getting scale set client: %w", err) - } - if l.messageSession != nil { - slog.DebugContext(l.ctx, "closing message session", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) - if err := l.messageSession.Close(); err != nil { - slog.ErrorContext(l.ctx, "closing message session", "error", err) - } - if err := scaleSetClient.DeleteMessageSession(context.Background(), l.messageSession); err != nil { - slog.ErrorContext(l.ctx, "error deleting message session", "error", err) - } - } - - l.running = false - close(l.quit) - l.cancelFunc() - return nil -} - -func (l *scaleSetListener) IsRunning() bool { - l.mux.Lock() - defer l.mux.Unlock() - return l.running -} - -func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage) { - l.mux.Lock() - defer l.mux.Unlock() - - if params.ScaleSetMessageType(msg.MessageType) != params.MessageTypeRunnerScaleSetJobMessages { - slog.DebugContext(l.ctx, "message is not a job message, ignoring") - return - } - - body, err := msg.GetJobsFromBody() - if err != nil { - slog.ErrorContext(l.ctx, "getting jobs from body", "error", err) - } - - if msg.MessageID < l.lastMessageID { - slog.DebugContext(l.ctx, "message is older 
than last message, ignoring") - return - } - - var completedJobs []params.ScaleSetJobMessage - var availableJobs []params.ScaleSetJobMessage - var startedJobs []params.ScaleSetJobMessage - var assignedJobs []params.ScaleSetJobMessage - - for _, job := range body { - switch job.MessageType { - case params.MessageTypeJobAssigned: - slog.InfoContext(l.ctx, "new job assigned", "job_id", job.JobID, "job_name", job.JobDisplayName) - assignedJobs = append(assignedJobs, job) - case params.MessageTypeJobStarted: - slog.InfoContext(l.ctx, "job started", "job_id", job.JobID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) - startedJobs = append(startedJobs, job) - case params.MessageTypeJobCompleted: - slog.InfoContext(l.ctx, "job completed", "job_id", job.JobID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) - completedJobs = append(completedJobs, job) - case params.MessageTypeJobAvailable: - slog.InfoContext(l.ctx, "job available", "job_id", job.JobID, "job_name", job.JobDisplayName) - availableJobs = append(availableJobs, job) - default: - slog.DebugContext(l.ctx, "unknown message type", "message_type", job.MessageType) - } - } - - scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() - if err != nil { - slog.ErrorContext(l.ctx, "getting scale set client", "error", err) - return - } - if len(availableJobs) > 0 { - jobIDs := make([]int64, len(availableJobs)) - for idx, job := range availableJobs { - jobIDs[idx] = job.RunnerRequestID - } - idsAcquired, err := scaleSetClient.AcquireJobs( - l.listenerCtx, l.scaleSetHelper.GetScaleSet().ScaleSetID, - l.messageSession.MessageQueueAccessToken(), jobIDs) - if err != nil { - // don't mark message as processed. It will be requeued. - slog.ErrorContext(l.ctx, "acquiring jobs", "error", err) - return - } - // HandleJobsAvailable only records jobs in the database for now. The jobs are purely - // informational, so an error here won't break anything. 
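Note the delivery semantics implied above: on failure the handler returns without deleting the message, so the queue redelivers it, and messages older than the persisted last ID are dropped on replay. That is at-least-once processing with a high-water-mark guard. A condensed sketch of the shape (illustrative types, not the scalesets client API):

    package main

    import (
        "errors"
        "fmt"
    )

    type message struct {
        id   int64
        body string
    }

    // consumer acks (deletes) a message only after successful handling, so
    // failures lead to redelivery; the lastID guard makes redeliveries of
    // older messages harmless. Mirrors the strict "<" check used above.
    type consumer struct {
        lastID int64
    }

    func (c *consumer) handle(msg message, process func(string) error) {
        if msg.id < c.lastID {
            fmt.Println("stale redelivery; ignoring", msg.id)
            return
        }
        if err := process(msg.body); err != nil {
            // do not delete the message; the queue will redeliver it
            fmt.Println("processing failed; leaving message queued:", err)
            return
        }
        c.lastID = msg.id // persist the high-water mark first...
        fmt.Println("processed and acked", msg.id) // ...then delete the message
    }

    func main() {
        c := &consumer{}
        fail := errors.New("transient error")
        c.handle(message{1, "jobs"}, func(string) error { return fail })
        c.handle(message{1, "jobs"}, func(string) error { return nil }) // redelivery succeeds
        c.handle(message{2, "jobs"}, func(string) error { return nil })
    }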
- if err := l.scaleSetHelper.HandleJobsAvailable(availableJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) - } - slog.DebugContext(l.ctx, "acquired jobs", "job_ids", idsAcquired) - } - - if len(assignedJobs) > 0 { - if err := l.scaleSetHelper.HandleJobsAvailable(assignedJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) - } - } - - if len(startedJobs) > 0 { - if err := l.scaleSetHelper.HandleJobsStarted(startedJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling started jobs", "error", err) - return - } - } - - if len(completedJobs) > 0 { - if err := l.scaleSetHelper.HandleJobsCompleted(completedJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling completed jobs", "error", err) - return - } - } - - if err := l.scaleSetHelper.SetLastMessageID(msg.MessageID); err != nil { - slog.ErrorContext(l.ctx, "setting last message ID", "error", err) - } else { - l.lastMessageID = msg.MessageID - } - - if err := l.scaleSetHelper.SetDesiredRunnerCount(msg.Statistics.TotalAssignedJobs); err != nil { - slog.ErrorContext(l.ctx, "setting desired runner count", "error", err) - } - - if err := l.messageSession.DeleteMessage(l.listenerCtx, msg.MessageID); err != nil { - slog.ErrorContext(l.ctx, "deleting message", "error", err) - } -} - -func (l *scaleSetListener) loop() { - defer close(l.loopExited) - defer l.Stop() - retryAfterUnauthorized := false - - slog.DebugContext(l.ctx, "starting scale set listener loop", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) - for { - select { - case <-l.quit: - return - case <-l.listenerCtx.Done(): - slog.DebugContext(l.ctx, "stopping scale set listener") - return - case <-l.ctx.Done(): - slog.DebugContext(l.ctx, "scaleset worker has stopped") - return - default: - slog.DebugContext(l.ctx, "getting message", "last_message_id", l.lastMessageID, "max_runners", l.scaleSetHelper.GetScaleSet().MaxRunners) - msg, err := l.messageSession.GetMessage( - l.listenerCtx, l.lastMessageID, l.scaleSetHelper.GetScaleSet().MaxRunners) - if err != nil { - if errors.Is(err, runnerErrors.ErrUnauthorized) { - if retryAfterUnauthorized { - slog.DebugContext(l.ctx, "unauthorized, stopping listener") - return - } - // The session manager refreshes the token automatically, but once we call - // GetMessage(), it blocks until a new message is sent on the longpoll. - // If there are no messages for a while, the token used to longpoll expires - // and we get an unauthorized error. We simply need to retry the request - // and it should use the refreshed token. If we fail a second time, we can - // return and the scaleset worker will attempt to restart the listener. - retryAfterUnauthorized = true - slog.DebugContext(l.ctx, "got unauthorized error, retrying") - continue - } - if !errors.Is(err, context.Canceled) { - slog.ErrorContext(l.ctx, "getting message", "error", err) - } - slog.DebugContext(l.ctx, "stopping scale set listener") - return - } - retryAfterUnauthorized = false - if !msg.IsNil() { - // Longpoll returns after 50 seconds. If no message arrives during that interval - // we get a nil message. We can simply ignore it and continue. 
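The unauthorized handling above is a retry-once policy: a single 401 on the long-poll is presumed to be an expired token that the session has already refreshed in the background, so the call is retried; a second consecutive 401 aborts so the worker can rebuild the session. A small sketch of that policy in isolation, with a hypothetical get callback standing in for GetMessage:

    package main

    import (
        "errors"
        "fmt"
    )

    var errUnauthorized = errors.New("unauthorized")

    // getWithOneRetry retries exactly once after an unauthorized error,
    // assuming the underlying token was refreshed in the meantime; any
    // second consecutive 401 is treated as fatal.
    func getWithOneRetry(get func() (string, error)) (string, error) {
        retried := false
        for {
            msg, err := get()
            if err != nil {
                if errors.Is(err, errUnauthorized) && !retried {
                    retried = true
                    continue // the next attempt should use the refreshed token
                }
                return "", err
            }
            return msg, nil
        }
    }

    func main() {
        calls := 0
        get := func() (string, error) {
            calls++
            if calls == 1 {
                return "", errUnauthorized
            }
            return "message", nil
        }
        fmt.Println(getWithOneRetry(get)) // message <nil>
    }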
- slog.DebugContext(l.ctx, "handling message", "message_id", msg.MessageID) - l.handleSessionMessage(msg) - } - } - } -} - -func (l *scaleSetListener) Wait() <-chan struct{} { - l.mux.Lock() - defer l.mux.Unlock() - - if !l.running { - slog.DebugContext(l.ctx, "scale set listener is not running") - return closed - } - return l.loopExited -} diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go deleted file mode 100644 index 7852cb89..00000000 --- a/workers/scaleset/util.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2025 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. -package scaleset - -import ( - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/params" -) - -func composeControllerWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { - return watcher.WithAny( - watcher.WithAll( - watcher.WithEntityScaleSetFilter(entity), - watcher.WithAny( - watcher.WithOperationTypeFilter(dbCommon.CreateOperation), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - watcher.WithOperationTypeFilter(dbCommon.DeleteOperation), - ), - ), - watcher.WithAll( - watcher.WithEntityFilter(entity), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), - ) -}
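composeControllerWatcherFilters above is plain predicate combination: WithAll is a logical AND over change payloads and WithAny a logical OR, so the expression reads as (scale-set change AND (create OR update OR delete)) OR (entity change AND update). A generic sketch of the same combinators (the watcher package's actual signatures may differ):

    package main

    import "fmt"

    type filter[T any] func(T) bool

    // allOf is the AND combinator: every filter must match.
    func allOf[T any](fs ...filter[T]) filter[T] {
        return func(v T) bool {
            for _, f := range fs {
                if !f(v) {
                    return false
                }
            }
            return true
        }
    }

    // anyOf is the OR combinator: at least one filter must match.
    func anyOf[T any](fs ...filter[T]) filter[T] {
        return func(v T) bool {
            for _, f := range fs {
                if f(v) {
                    return true
                }
            }
            return false
        }
    }

    type change struct{ entity, op string }

    func main() {
        scaleSetChanges := allOf(
            func(c change) bool { return c.entity == "scaleset" },
            anyOf(
                func(c change) bool { return c.op == "create" },
                func(c change) bool { return c.op == "update" },
                func(c change) bool { return c.op == "delete" },
            ),
        )
        fmt.Println(scaleSetChanges(change{"scaleset", "update"})) // true
        fmt.Println(scaleSetChanges(change{"instance", "update"})) // false
    }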