diff --git a/.github/workflows/build-and-push.yaml b/.github/workflows/build-and-push.yaml index 9bb12b8..c32e38f 100644 --- a/.github/workflows/build-and-push.yaml +++ b/.github/workflows/build-and-push.yaml @@ -1,51 +1,58 @@ -name: ci +# +name: Create and publish a Docker image +# Configures this workflow to run every time a change is pushed. on: push -jobs: - build: - runs-on: ubuntu-22.04 +# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. +jobs: + build-and-push-image: + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: write + attestations: write + id-token: write + # steps: - - - name: Repository meta - id: repository - run: | - registry=${{ github.server_url }} - registry=${registry##http*://} - echo "registry=${registry}" >> "$GITHUB_OUTPUT" - echo "registry=${registry}" - repository="$(echo "${{ github.repository }}" | tr '[:upper:]' '[:lower:]')" - echo "repository=${repository}" >> "$GITHUB_OUTPUT" - echo "repository=${repository}" - - - name: Docker meta - uses: docker/metadata-action@v5 - id: docker + - name: Checkout repository + uses: actions/checkout@v4 + # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 with: - images: ${{ steps.repository.outputs.registry }}/${{ steps.repository.outputs.repository }} - - - name: Login to registry - uses: docker/login-action@v3 + registry: ${{ env.REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 with: - registry: ${{ steps.repository.outputs.registry }} - username: ${{ secrets.PACKAGES_USER }} - password: ${{ secrets.PACKAGES_TOKEN }} - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - with: - buildkitd-flags: '--allow-insecure-entitlement network.host' - driver-opts: network=host - - - name: Build and push - uses: docker/build-push-action@v6 + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: ${{ github.sha }} + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. 
+      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
+      - name: Build and push Docker image
+        id: push
+        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
         with:
+          context: .
           push: true
-          allow: network.host
-          network: host
-          platforms: linux/amd64,linux/arm64
-          tags: ${{ steps.docker.outputs.tags }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+
+      # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)."
+      - name: Generate artifact attestation
+        uses: actions/attest-build-provenance@v1
+        with:
+          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          subject-digest: ${{ steps.push.outputs.digest }}
+          push-to-registry: true
diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
new file mode 100644
index 0000000..f63e64e
--- /dev/null
+++ b/.github/workflows/pr.yaml
@@ -0,0 +1,19 @@
+name: PR
+
+on:
+  pull_request:
+    types: [opened, ready_for_review, synchronize]
+
+jobs:
+  build:
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 18
+      - name: Run tests
+        run: |
+          yarn install --frozen-lockfile --network-timeout 600000
+          yarn tsc
diff --git a/.yarnrc.yml b/.yarnrc.yml
deleted file mode 100644
index 3186f3f..0000000
--- a/.yarnrc.yml
+++ /dev/null
@@ -1 +0,0 @@
-nodeLinker: node-modules
diff --git a/Dockerfile b/Dockerfile
index 6d4d598..817041c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 # Stage 1 - Create yarn install skeleton layer
-FROM node:20.18.1 AS packages
+FROM node:18-bookworm-slim AS packages
 
 WORKDIR /app
 COPY package.json yarn.lock ./
@@ -12,11 +12,21 @@ COPY plugins plugins
 RUN find packages \! -name "package.json" -mindepth 2 -maxdepth 2 -exec rm -rf {} \+
 
 # Stage 2 - Install dependencies and build packages
-FROM node:20.18.1 AS build
+FROM node:18-bookworm-slim AS build
 
-# Required for arm64
-RUN apt update -y
-RUN apt install -y python3 make gcc build-essential bash
+# Install isolated-vm dependencies; these are needed by the @backstage/plugin-scaffolder-backend.
+RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
+    --mount=type=cache,target=/var/lib/apt,sharing=locked \
+    apt-get update && \
+    apt-get install -y --no-install-recommends python3 g++ build-essential git && \
+    yarn config set python /usr/bin/python3
+
+# Install sqlite3 dependencies. You can skip this if you don't use sqlite3 in the image,
+# in which case you should also move better-sqlite3 to "devDependencies" in package.json.
+RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
+    --mount=type=cache,target=/var/lib/apt,sharing=locked \
+    apt-get update && \
+    apt-get install -y --no-install-recommends libsqlite3-dev
 
 USER node
 WORKDIR /app
@@ -24,7 +34,7 @@ WORKDIR /app
 COPY --from=packages --chown=node:node /app .
 
 RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \
-    yarn install --network-timeout 600000
+    yarn install --frozen-lockfile --network-timeout 600000
 
 COPY --chown=node:node . .
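The stages above depend on BuildKit: `RUN --mount=type=cache` keeps the apt and yarn caches out of the image layers, and `--frozen-lockfile` makes the install fail fast when `yarn.lock` drifts from `package.json`. A minimal sketch of a local build under those assumptions; the `backstage` tag is only illustrative, mirroring the `build-image` script in `packages/backend/package.json`:

```bash
# Cache mounts (RUN --mount=type=cache) are a BuildKit feature; a classic
# docker build without BuildKit rejects these Dockerfile instructions.
DOCKER_BUILDKIT=1 docker build -t backstage .

# Equivalent invocation through buildx, which enables BuildKit by default.
docker buildx build -t backstage --load .
```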
@@ -38,33 +48,46 @@ RUN mkdir packages/backend/dist/skeleton packages/backend/dist/bundle \ && tar xzf packages/backend/dist/bundle.tar.gz -C packages/backend/dist/bundle # Stage 3 - Build the actual backend image and install production dependencies -FROM node:20.18.1 +FROM node:18-bookworm-slim # Install isolate-vm dependencies, these are needed by the @backstage/plugin-scaffolder-backend. -# Install packages needed to get utility binaries RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ apt-get update && \ - apt-get install -y --no-install-recommends python3 python3-pip python3-venv g++ build-essential ca-certificates curl + apt-get install -y --no-install-recommends python3 g++ build-essential && \ + yarn config set python /usr/bin/python3 -RUN yarn config set python /usr/bin/python3 +# Install sqlite3 dependencies. You can skip this if you don't use sqlite3 in the image, +# in which case you should also move better-sqlite3 to "devDependencies" in package.json. +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update && \ + apt-get install -y --no-install-recommends libsqlite3-dev -# Add kubectl for the kube apply plugin. -# Add mkdocs for the TechDocs plugin. -RUN if test "$(uname -m)" = "x86_64"; \ - then \ - curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/amd64/kubectl; \ - fi -RUN if test "$(uname -m)" != "x86_64"; \ - then \ - curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/arm64/kubectl; \ - fi -RUN chmod +x /usr/local/bin/kubectl +# Add kubectl. +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update && \ + apt-get install -y --no-install-recommends apt-transport-https ca-certificates curl gpg -ENV VIRTUAL_ENV=/opt/venv -RUN python3 -m venv $VIRTUAL_ENV -ENV PATH="$VIRTUAL_ENV/bin:$PATH" -RUN pip3 install 'mkdocs-techdocs-core==1.4.2' 'mkdocs-awesome-pages-plugin==2.10.1' +RUN curl -fsSL https://pkgs.k8s.io/core:/stable:/v1.29/deb/Release.key | gpg --dearmor -o /etc/apt/keyrings/kubernetes-apt-keyring.gpg && \ + echo 'deb [signed-by=/etc/apt/keyrings/kubernetes-apt-keyring.gpg] https://pkgs.k8s.io/core:/stable:/v1.29/deb/ /' | tee /etc/apt/sources.list.d/kubernetes.list + +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update && \ + apt-get install -y --no-install-recommends kubectl + +# Add cnoe cli. +RUN curl -L -O https://github.com/cnoe-io/cnoe-cli/releases/download/v0.1.0/cnoe_Linux_x86_64.tar.gz && \ + curl -L -O https://github.com/cnoe-io/cnoe-cli/releases/download/v0.1.0/checksums.txt && \ + sha256sum -c --strict --status --ignore-missing checksums.txt && \ + tar -xzf cnoe_Linux_x86_64.tar.gz && \ + mv cnoe /usr/bin/cnoe-cli && \ + chmod +x /usr/bin/cnoe-cli + +COPY ./cnoe-wrapper.sh /usr/bin/cnoe +RUN chmod +x /usr/bin/cnoe # From here on we use the least-privileged `node` user to run the backend. 
USER node @@ -80,7 +103,7 @@ WORKDIR /app COPY --from=build --chown=node:node /app/yarn.lock /app/package.json /app/packages/backend/dist/skeleton/ ./ RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \ - yarn install --production --network-timeout 600000 + yarn install --frozen-lockfile --production --network-timeout 600000 # Copy the built packages from the build stage COPY --from=build --chown=node:node /app/packages/backend/dist/bundle/ ./ diff --git a/README.md b/README.md index 61707f6..9279405 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,43 @@ -# EDP Backstage +# CNOE Backstage -The EDP bespoke version of backstage. - -With respect to the CNOE stack (where eDF originates from) it is comparable to https://github.com/cnoe-io/backstage-app - -At the time writing CNOE-backstage-app is "version": "1.28.4" +This repository contains code for the [Backstage](https://backstage.io) images used by the CNOE stacks. ## Container Images -Container images are pushed to the Cefor Container Registry and available [here](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW-CICD/-/packages/container/backstage-edp/). +Container images are pushed to the GitHub Container Registry and available [here](https://github.com/cnoe-io/backstage-app/pkgs/container/backstage-app). ## Local Development -Use of [**edpbuilder**](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW/edpbuilder.git) is recommended for local setup. +Use of [**idpbuilder**](https://github.com/cnoe-io/idpbuilder) is recommended for local setup. +See [the instructions](https://github.com/cnoe-io/idpbuilder?tab=readme-ov-file#getting-started) in the idpbuilder repository for details. ### Create your local cluster -Once edpbuilder is installed on your computer, create a stack that you are interested in. For example: +Once idpbuilder is installed on your computer, create a stack that you are interested in. For example: -> Hint: From here on this is the old CNOE README .... no guarantee that this works as described! +```bash +./idpbuilder create -p https://github.com/cnoe-io/stacks//ref-implementation +``` + +Wait for ArgoCD applications to be healthy: + +```bash +$ kubectl get applications -A + +NAMESPACE NAME SYNC STATUS HEALTH STATUS +argocd argo-workflows Synced Healthy +argocd argocd Synced Healthy +argocd backstage Synced Healthy +argocd backstage-templates Synced Healthy +argocd coredns Synced Healthy +argocd external-secrets Synced Healthy +argocd gitea Synced Healthy +argocd keycloak Synced Healthy +argocd metric-server Synced Healthy +argocd nginx Synced Healthy +argocd spark-operator Synced Healthy +``` ### Update Backstage application config diff --git a/app-config.yaml b/app-config.yaml index 0006a70..9c3f7ee 100644 --- a/app-config.yaml +++ b/app-config.yaml @@ -66,7 +66,6 @@ auth: session: secret: abcdfkjalskdfjkla providers: - guest: {} keycloak-oidc: development: metadataUrl: https://cnoe.localtest.me:8443/keycloak/realms/cnoe/.well-known/openid-configuration @@ -132,4 +131,4 @@ argocd: # replace with your argocd password e.g. 
kubectl -n argocd get secret argocd-initial-admin-secret -o jsonpath="{.data.password}" | base64 -d password: ${ARGOCD_ADMIN_PASSWORD} argoWorkflows: - baseUrl: https://cnoe.localtest.me:8443/argo-workflows \ No newline at end of file + baseUrl: https://cnoe.localtest.me:8443/argo-workflows diff --git a/backstage.json b/backstage.json index c6aea75..b67976c 100644 --- a/backstage.json +++ b/backstage.json @@ -1,3 +1,3 @@ { - "version": "1.38.1" + "version": "1.28.4" } diff --git a/package.json b/package.json index 7e09d0f..a7f7fc5 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ ] }, "devDependencies": { - "@backstage/cli": "^0.32.0", + "@backstage/cli": "^0.26.10", "@backstage/e2e-test-utils": "^0.1.1", "@playwright/test": "^1.32.3", "@spotify/prettier-config": "^12.0.0", diff --git a/packages/app/package.json b/packages/app/package.json index aba7302..3fc1f77 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -16,31 +16,35 @@ "dependencies": { "@backstage-community/plugin-github-actions": "^0.6.16", "@backstage-community/plugin-tech-radar": "^0.7.4", - "@backstage/app-defaults": "^1.6.1", - "@backstage/catalog-model": "^1.7.3", - "@backstage/cli": "^0.32.0", - "@backstage/core-app-api": "^1.16.1", - "@backstage/core-components": "^0.17.1", - "@backstage/core-plugin-api": "^1.10.6", - "@backstage/integration-react": "^1.2.6", - "@backstage/plugin-api-docs": "^0.12.6", - "@backstage/plugin-catalog": "^1.29.0", - "@backstage/plugin-catalog-common": "^1.1.3", - "@backstage/plugin-catalog-graph": "^0.4.18", - "@backstage/plugin-catalog-import": "^0.12.13", - "@backstage/plugin-catalog-react": "^1.17.0", - "@backstage/plugin-home": "^0.8.7", - "@backstage/plugin-kubernetes": "^0.12.6", - "@backstage/plugin-org": "^0.6.38", - "@backstage/plugin-permission-react": "^0.4.33", - "@backstage/plugin-scaffolder": "^1.30.1", - "@backstage/plugin-search": "^1.4.25", - "@backstage/plugin-search-react": "^1.8.8", - "@backstage/plugin-techdocs": "^1.12.5", - "@backstage/plugin-techdocs-module-addons-contrib": "^1.1.23", - "@backstage/plugin-techdocs-react": "^1.2.16", - "@backstage/plugin-user-settings": "^0.8.21", - "@backstage/theme": "^0.6.5", + "@backstage/app-defaults": "^1.5.7", + "@backstage/catalog-model": "^1.5.0", + "@backstage/cli": "^0.26.10", + "@backstage/core-app-api": "^1.13.0", + "@backstage/core-components": "^0.14.8", + "@backstage/core-plugin-api": "^1.9.3", + "@backstage/integration-react": "^1.1.28", + "@backstage/plugin-api-docs": "^0.11.6", + "@backstage/plugin-catalog": "^1.21.0", + "@backstage/plugin-catalog-common": "^1.0.24", + "@backstage/plugin-catalog-graph": "^0.4.6", + "@backstage/plugin-catalog-import": "^0.12.0", + "@backstage/plugin-catalog-react": "^1.12.1", + "@backstage/plugin-home": "^0.7.6", + "@backstage/plugin-kubernetes": "^0.11.11", + "@backstage/plugin-org": "^0.6.26", + "@backstage/plugin-permission-react": "^0.4.23", + "@backstage/plugin-scaffolder": "^1.22.0", + "@backstage/plugin-search": "^1.4.13", + "@backstage/plugin-search-react": "^1.7.12", + "@backstage/plugin-techdocs": "^1.10.6", + "@backstage/plugin-techdocs-module-addons-contrib": "^1.1.11", + "@backstage/plugin-techdocs-react": "^1.2.5", + "@backstage/plugin-user-settings": "^0.8.8", + "@backstage/theme": "^0.5.6", + "@internal/plugin-apache-spark": "^0.1.0", + "@internal/plugin-argo-workflows": "^0.1.0", + "@internal/plugin-cnoe-ui": "^0.1.0", + "@internal/plugin-terraform": "^0.1.0", "@material-ui/core": "^4.12.2", "@material-ui/icons": "^4.9.1", 
"@roadiehq/backstage-plugin-argo-cd": "^2.5.1", @@ -52,7 +56,7 @@ "react-use": "^17.2.4" }, "devDependencies": { - "@backstage/test-utils": "^1.7.7", + "@backstage/test-utils": "^1.5.7", "@playwright/test": "^1.32.3", "@testing-library/dom": "^9.0.0", "@testing-library/jest-dom": "^6.0.0", diff --git a/packages/app/src/App.tsx b/packages/app/src/App.tsx index 74fc096..ae87f47 100644 --- a/packages/app/src/App.tsx +++ b/packages/app/src/App.tsx @@ -33,7 +33,19 @@ import { AppRouter, FlatRoutes } from '@backstage/core-app-api'; import { CatalogGraphPage } from '@backstage/plugin-catalog-graph'; import { RequirePermission } from '@backstage/plugin-permission-react'; import { catalogEntityCreatePermission } from '@backstage/plugin-catalog-common/alpha'; +import LightIcon from '@material-ui/icons/WbSunny'; +import { + CNOEHomepage, + cnoeLightTheme, + cnoeDarkTheme, +} from '@internal/plugin-cnoe-ui'; import {configApiRef, useApi} from "@backstage/core-plugin-api"; +import { ArgoWorkflowsPage } from '@internal/plugin-argo-workflows'; +import { ApacheSparkPage } from '@internal/plugin-apache-spark'; +import { + UnifiedThemeProvider +} from "@backstage/theme"; +import { TerraformPluginPage } from '@internal/plugin-terraform'; const app = createApp({ apis, @@ -72,12 +84,33 @@ const app = createApp({ bind(orgPlugin.externalRoutes, { catalogIndex: catalogPlugin.routes.catalogIndex, }); - } + }, + themes: [ + { + id: 'cnoe-light-theme', + title: 'Light Theme', + variant: 'light', + icon: , + Provider: ({ children }) => ( + + ), + }, + { + id: 'cnoe-dark-theme', + title: 'Dark Theme', + variant: 'dark', + icon: , + Provider: ({ children }) => ( + + ), + }, + ], }); const routes = ( } /> + } /> } /> } /> } /> + } /> + } /> + } /> ); @@ -125,3 +161,6 @@ export default app.createRoot( , ); + + + diff --git a/packages/app/src/components/Root/Root.tsx b/packages/app/src/components/Root/Root.tsx index 51a4e86..28d7342 100644 --- a/packages/app/src/components/Root/Root.tsx +++ b/packages/app/src/components/Root/Root.tsx @@ -5,6 +5,7 @@ import ExtensionIcon from '@material-ui/icons/Extension'; import MapIcon from '@material-ui/icons/MyLocation'; import LibraryBooks from '@material-ui/icons/LibraryBooks'; import CreateComponentIcon from '@material-ui/icons/AddCircleOutline'; +import {LogoFull, LogoIcon} from '@internal/plugin-cnoe-ui'; import { Settings as SidebarSettings, UserSettingsSignInAvatar, @@ -19,6 +20,7 @@ import { SidebarPage, SidebarScrollWrapper, SidebarSpace, + useSidebarOpenState, Link, } from '@backstage/core-components'; import MenuIcon from '@material-ui/icons/Menu'; @@ -41,10 +43,12 @@ const useSidebarLogoStyles = makeStyles({ const SidebarLogo = () => { const classes = useSidebarLogoStyles(); + const { isOpen } = useSidebarOpenState(); return (
+        {isOpen ? <LogoFull /> : <LogoIcon />}
     
); diff --git a/packages/app/src/components/catalog/EntityPage.tsx b/packages/app/src/components/catalog/EntityPage.tsx index a56185c..a5a32cf 100644 --- a/packages/app/src/components/catalog/EntityPage.tsx +++ b/packages/app/src/components/catalog/EntityPage.tsx @@ -10,8 +10,11 @@ import { } from '@backstage/plugin-api-docs'; import { EntityAboutCard, + EntityDependsOnComponentsCard, + EntityDependsOnResourcesCard, EntityHasComponentsCard, EntityHasResourcesCard, + EntityHasSubcomponentsCard, EntityHasSystemsCard, EntityLayout, EntityLinksCard, @@ -25,6 +28,10 @@ import { hasRelationWarnings, EntityRelationWarning, } from '@backstage/plugin-catalog'; +import { + isGithubActionsAvailable, + EntityGithubActionsContent, +} from '@backstage-community/plugin-github-actions'; import { EntityUserProfileCard, EntityGroupProfileCard, @@ -51,13 +58,20 @@ import { import { TechDocsAddons } from '@backstage/plugin-techdocs-react'; import { ReportIssue } from '@backstage/plugin-techdocs-module-addons-contrib'; -import { EntityKubernetesContent, isKubernetesAvailable } from '@backstage/plugin-kubernetes'; +import { EntityKubernetesContent } from '@backstage/plugin-kubernetes'; import { EntityArgoCDOverviewCard, isArgocdAvailable } from '@roadiehq/backstage-plugin-argo-cd'; +import { + EntityArgoWorkflowsOverviewCard, EntityArgoWorkflowsTemplateOverviewCard, + isArgoWorkflowsAvailable, +} from '@internal/plugin-argo-workflows'; +import {ApacheSparkPage, isApacheSparkAvailable} from "@internal/plugin-apache-spark"; +import { isTerraformAvailable, TerraformPluginPage } from '@internal/plugin-terraform'; + const techdocsContent = ( @@ -67,7 +81,13 @@ const techdocsContent = ( ); const cicdContent = ( + // This is an example of how you can implement your company's logic in entity page. + // You can for example enforce that all components of type 'service' should use GitHubActions + + + + + + isArgoWorkflowsAvailable(e)}> + + + + + + + + + + isTerraformAvailable(e)}> + + + + + + + + + + ); @@ -147,10 +189,14 @@ const serviceEntityPage = ( {cicdContent} - isKubernetesAvailable(e)}> + + + + + @@ -162,6 +208,17 @@ const serviceEntityPage = ( + + + + + + + + + + + {techdocsContent} @@ -178,6 +235,17 @@ const websiteEntityPage = ( {cicdContent} + + + + + + + + + + + {techdocsContent} @@ -228,6 +296,9 @@ const apiPage = ( + + + diff --git a/packages/backend/package.json b/packages/backend/package.json index 1593c24..d600292 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -16,41 +16,40 @@ "build-image": "docker build ../.. 
-f Dockerfile --tag backstage" }, "dependencies": { - "@backstage/backend-common": "^0.25.0", - "@backstage/backend-defaults": "^0.9.0", - "@backstage/backend-plugin-api": "^1.3.0", - "@backstage/backend-tasks": "^0.6.1", - "@backstage/catalog-client": "^1.9.1", - "@backstage/catalog-model": "^1.7.3", - "@backstage/config": "^1.3.2", - "@backstage/errors": "^1.2.7", - "@backstage/integration": "^1.16.3", - "@backstage/plugin-app-backend": "^0.5.1", - "@backstage/plugin-auth-backend": "^0.24.5", - "@backstage/plugin-auth-backend-module-guest-provider": "^0.2.7", - "@backstage/plugin-auth-backend-module-oidc-provider": "^0.4.2", - "@backstage/plugin-auth-node": "^0.6.2", - "@backstage/plugin-catalog-backend": "^1.32.1", - "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "^0.2.7", - "@backstage/plugin-kubernetes-backend": "^0.19.5", - "@backstage/plugin-permission-common": "^0.8.4", - "@backstage/plugin-permission-node": "^0.9.1", - "@backstage/plugin-proxy-backend": "^0.6.1", - "@backstage/plugin-scaffolder-backend": "^1.32.1", - "@backstage/plugin-scaffolder-backend-module-gitea": "^0.2.8", - "@backstage/plugin-scaffolder-backend-module-github": "^0.7.0", - "@backstage/plugin-scaffolder-node": "^0.8.1", - "@backstage/plugin-search-backend": "^2.0.1", - "@backstage/plugin-search-backend-module-catalog": "^0.3.3", - "@backstage/plugin-search-backend-module-pg": "^0.5.43", - "@backstage/plugin-search-backend-module-techdocs": "^0.4.1", - "@backstage/plugin-search-backend-node": "^1.3.10", - "@backstage/plugin-techdocs-backend": "^2.0.1", - "@backstage/types": "^1.2.1", + "@backstage/backend-common": "^0.23.2", + "@backstage/backend-defaults": "^0.4.0", + "@backstage/backend-plugin-api": "^0.7.0", + "@backstage/backend-tasks": "^0.5.26", + "@backstage/catalog-client": "^1.6.5", + "@backstage/catalog-model": "^1.5.0", + "@backstage/config": "^1.2.0", + "@backstage/errors": "^1.2.4", + "@backstage/integration": "^1.12.0", + "@backstage/plugin-app-backend": "^0.3.70", + "@backstage/plugin-auth-backend": "^0.22.8", + "@backstage/plugin-auth-backend-module-oidc-provider": "^0.2.2", + "@backstage/plugin-auth-node": "^0.4.16", + "@backstage/plugin-catalog-backend": "^1.23.2", + "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "^0.1.19", + "@backstage/plugin-kubernetes-backend": "^0.18.2", + "@backstage/plugin-permission-common": "^0.8.0", + "@backstage/plugin-permission-node": "^0.8.0", + "@backstage/plugin-proxy-backend": "^0.5.2", + "@backstage/plugin-scaffolder-backend": "^1.22.11", + "@backstage/plugin-scaffolder-backend-module-gitea": "^0.1.11", + "@backstage/plugin-scaffolder-backend-module-github": "^0.4.0", + "@backstage/plugin-scaffolder-node": "^0.4.7", + "@backstage/plugin-search-backend": "^1.5.13", + "@backstage/plugin-search-backend-module-catalog": "^0.1.27", + "@backstage/plugin-search-backend-module-pg": "^0.5.31", + "@backstage/plugin-search-backend-module-techdocs": "^0.1.26", + "@backstage/plugin-search-backend-node": "^1.2.26", + "@backstage/plugin-techdocs-backend": "^1.10.8", + "@backstage/types": "^1.1.1", + "@internal/backstage-plugin-terraform-backend": "^0.1.0", "@kubernetes/client-node": "~0.20.0", - "@roadiehq/backstage-plugin-argo-cd-backend": "3.1.0", - "@roadiehq/scaffolder-backend-module-http-request": "^4.3.5", - "@roadiehq/scaffolder-backend-module-utils": "3.0.0", + "@roadiehq/backstage-plugin-argo-cd-backend": "3.0.2", + "@roadiehq/scaffolder-backend-module-utils": "^1.17.0", "app": "link:../app", "better-sqlite3": "^9.0.0", 
"dockerode": "^3.3.1", @@ -62,7 +61,7 @@ "winston": "^3.2.1" }, "devDependencies": { - "@backstage/cli": "^0.32.0", + "@backstage/cli": "^0.26.10", "@types/dockerode": "^3.3.0", "@types/express": "^4.17.6", "@types/express-serve-static-core": "^4.17.5", diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index 5fc9d6b..77a146f 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -1,45 +1,36 @@ import { createBackend } from '@backstage/backend-defaults'; -import { cnoeScaffolderActions } from './plugins/scaffolder'; import { authModuleKeycloakOIDCProvider } from './plugins/auth'; +import { cnoeScaffolderActions } from './plugins/scaffolder'; +import { legacyPlugin } from '@backstage/backend-common'; const backend = createBackend(); // core plugins -backend.add(import('@backstage/plugin-app-backend')); -backend.add(import('@backstage/plugin-catalog-backend')); -backend.add(import('@backstage/plugin-proxy-backend')); +backend.add(import('@backstage/plugin-app-backend/alpha')); +backend.add(import('@backstage/plugin-catalog-backend/alpha')); +backend.add(import('@backstage/plugin-proxy-backend/alpha')); backend.add(import('@backstage/plugin-techdocs-backend/alpha')); - // auth plugins backend.add(import('@backstage/plugin-auth-backend')); -backend.add(import('@backstage/plugin-auth-backend-module-guest-provider')); - // scaffolder plugins backend.add(import('@backstage/plugin-scaffolder-backend/alpha')); backend.add( import('@backstage/plugin-catalog-backend-module-scaffolder-entity-model'), ); backend.add(import('@backstage/plugin-scaffolder-backend-module-github')); - // search plugins backend.add(import('@backstage/plugin-search-backend/alpha')); - -backend.add(import('@backstage/plugin-search-backend-module-catalog')); +backend.add(import('@backstage/plugin-search-backend-module-catalog/alpha')); backend.add(import('@backstage/plugin-search-backend-module-techdocs/alpha')); - // other @backstage plugins -backend.add(import('@backstage/plugin-kubernetes-backend')); - +backend.add(import('@backstage/plugin-kubernetes-backend/alpha')); +// non-core plugins // roadie plugins backend.add(import('@roadiehq/scaffolder-backend-module-utils/new-backend')); -backend.add(import('./plugins/argocd_index')); - -backend.add( - import('@roadiehq/scaffolder-backend-module-http-request/new-backend'), -); - +backend.add(legacyPlugin('argocd', import('./plugins/argocd'))); // cnoe plugins backend.add(authModuleKeycloakOIDCProvider); backend.add(cnoeScaffolderActions); +backend.add(import('@internal/backstage-plugin-terraform-backend')); -backend.start(); \ No newline at end of file +backend.start(); diff --git a/packages/backend/src/plugins/argocd.ts b/packages/backend/src/plugins/argocd.ts index 8bcd3e9..611bf54 100644 --- a/packages/backend/src/plugins/argocd.ts +++ b/packages/backend/src/plugins/argocd.ts @@ -2,42 +2,18 @@ import { Config } from '@backstage/config'; import { createTemplateAction } from '@backstage/plugin-scaffolder-node'; import { examples } from './gitea-actions'; import { Logger } from 'winston'; + import { ArgoService } from '@roadiehq/backstage-plugin-argo-cd-backend'; + import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend'; -import { loggerToWinstonLogger } from '@backstage/backend-common'; +import { PluginEnvironment } from '../types'; -import { - coreServices, - createBackendPlugin, -} from '@backstage/backend-plugin-api'; - -export const argocdPlugin = createBackendPlugin({ - pluginId: 'argocd', - register(env) 
{
-    env.registerInit({
-      deps: {
-        logger: coreServices.logger,
-        config: coreServices.rootConfig,
-        reader: coreServices.urlReader,
-        discovery: coreServices.discovery,
-        auth: coreServices.auth,
-        httpRouter: coreServices.httpRouter,
-      },
-      async init({
-        logger,
-        config,
-        httpRouter,
-      }) {
-        httpRouter.use(
-          await createRouter({
-            logger: loggerToWinstonLogger(logger),
-            config,
-          }),
-        );
-      },
-    });
-  },
-});
+export default async function createPlugin({
+  logger,
+  config,
+}: PluginEnvironment) {
+  return await createRouter({ logger, config });
+}
 
 export function createArgoCDApp(options: { config: Config; logger: Logger }) {
   const { config, logger } = options;
diff --git a/packages/backend/src/plugins/argocd_index.ts b/packages/backend/src/plugins/argocd_index.ts
deleted file mode 100644
index bd0bc7e..0000000
--- a/packages/backend/src/plugins/argocd_index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { argocdPlugin as default } from './argocd';
\ No newline at end of file
diff --git a/packages/backend/src/plugins/proxy.ts b/packages/backend/src/plugins/proxy.ts
index f61619d..54ec393 100644
--- a/packages/backend/src/plugins/proxy.ts
+++ b/packages/backend/src/plugins/proxy.ts
@@ -1,4 +1,4 @@
-import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend';
+import { createRouter } from '@backstage/plugin-proxy-backend';
 import { Router } from 'express';
 import { PluginEnvironment } from '../types';
 
@@ -7,6 +7,7 @@ export default async function createPlugin(
 ): Promise<Router> {
   return await createRouter({
     logger: env.logger,
-    config: env.config
+    config: env.config,
+    discovery: env.discovery,
   });
 }
diff --git a/packages/backend/src/types.ts b/packages/backend/src/types.ts
index 0dad120..9cd2c74 100644
--- a/packages/backend/src/types.ts
+++ b/packages/backend/src/types.ts
@@ -5,8 +5,9 @@ import {
   PluginDatabaseManager,
   PluginEndpointDiscovery,
   TokenManager,
-} from '@backstage/backend-common/dist'; //TODO: deprecated
-import { PluginTaskScheduler } from '@backstage/backend-tasks/dist';
+  UrlReader,
+} from '@backstage/backend-common';
+import { PluginTaskScheduler } from '@backstage/backend-tasks';
 import { PermissionEvaluator } from '@backstage/plugin-permission-common';
 import { IdentityApi } from '@backstage/plugin-auth-node';
 
@@ -15,6 +16,7 @@ export type PluginEnvironment = {
   database: PluginDatabaseManager;
   cache: PluginCacheManager;
   config: Config;
+  reader: UrlReader;
   discovery: PluginEndpointDiscovery;
   tokenManager: TokenManager;
   scheduler: PluginTaskScheduler;
diff --git a/plugins/apache-spark/.eslintrc.js b/plugins/apache-spark/.eslintrc.js
new file mode 100644
index 0000000..e2a53a6
--- /dev/null
+++ b/plugins/apache-spark/.eslintrc.js
@@ -0,0 +1 @@
+module.exports = require('@backstage/cli/config/eslint-factory')(__dirname);
diff --git a/plugins/apache-spark/README.md b/plugins/apache-spark/README.md
new file mode 100644
index 0000000..976aba2
--- /dev/null
+++ b/plugins/apache-spark/README.md
@@ -0,0 +1,13 @@
+# apache-spark
+
+Welcome to the apache-spark plugin!
+
+_This plugin was created through the Backstage CLI_
+
+## Getting started
+
+Your plugin has been added to the example app in this repository, meaning you'll be able to access it by running `yarn start` in the root directory, and then navigating to [/apache-spark](http://localhost:3000/apache-spark).
+
+You can also serve the plugin in isolation by running `yarn start` in the plugin directory.
+This method of serving the plugin provides quicker iteration speed and a faster startup and hot reloads.
+It is only meant for local development, and the setup for it can be found inside the [/dev](./dev) directory.
diff --git a/plugins/apache-spark/dev/index.tsx b/plugins/apache-spark/dev/index.tsx
new file mode 100644
index 0000000..5f2b474
--- /dev/null
+++ b/plugins/apache-spark/dev/index.tsx
@@ -0,0 +1,12 @@
+import React from 'react';
+import { createDevApp } from '@backstage/dev-utils';
+import { apacheSparkPlugin, ApacheSparkPage } from '../src/plugin';
+
+createDevApp()
+  .registerPlugin(apacheSparkPlugin)
+  .addPage({
+    element: <ApacheSparkPage />,
+    title: 'Root Page',
+    path: '/apache-spark'
+  })
+  .render();
diff --git a/plugins/apache-spark/package.json b/plugins/apache-spark/package.json
new file mode 100644
index 0000000..71639db
--- /dev/null
+++ b/plugins/apache-spark/package.json
@@ -0,0 +1,51 @@
+{
+  "name": "@internal/plugin-apache-spark",
+  "version": "0.1.0",
+  "main": "src/index.ts",
+  "types": "src/index.ts",
+  "license": "Apache-2.0",
+  "private": true,
+  "publishConfig": {
+    "access": "public",
+    "main": "dist/index.esm.js",
+    "types": "dist/index.d.ts"
+  },
+  "backstage": {
+    "role": "frontend-plugin"
+  },
+  "sideEffects": false,
+  "scripts": {
+    "start": "backstage-cli package start",
+    "build": "backstage-cli package build",
+    "lint": "backstage-cli package lint",
+    "test": "backstage-cli package test",
+    "clean": "backstage-cli package clean",
+    "prepack": "backstage-cli package prepack",
+    "postpack": "backstage-cli package postpack"
+  },
+  "dependencies": {
+    "@backstage/core-components": "^0.14.8",
+    "@backstage/core-plugin-api": "^1.9.3",
+    "@backstage/theme": "^0.5.6",
+    "@material-ui/core": "^4.9.13",
+    "@material-ui/icons": "^4.9.1",
+    "@material-ui/lab": "^4.0.0-alpha.61",
+    "react-use": "^17.2.4"
+  },
+  "peerDependencies": {
+    "react": "^16.13.1 || ^17.0.0"
+  },
+  "devDependencies": {
+    "@backstage/cli": "^0.26.10",
+    "@backstage/core-app-api": "^1.13.0",
+    "@backstage/dev-utils": "^1.0.34",
+    "@backstage/test-utils": "^1.5.7",
+    "@testing-library/jest-dom": "^5.10.1",
+    "@testing-library/react": "^12.1.3",
+    "@testing-library/user-event": "^14.0.0",
+    "msw": "^1.0.0"
+  },
+  "files": [
+    "dist"
+  ]
+}
diff --git a/plugins/apache-spark/src/api/index.test.ts b/plugins/apache-spark/src/api/index.test.ts
new file mode 100644
index 0000000..20f775b
--- /dev/null
+++ b/plugins/apache-spark/src/api/index.test.ts
@@ -0,0 +1,113 @@
+// import { ApacheSparkClient } from './index';
+// import { ApacheSpark } from './model';
+//
+// const mockKubernetesApi = {
+//   proxy: jest.fn(),
+//   getClusters: jest.fn(),
+//   getObjectsByEntity: jest.fn(),
+//   getWorkloadsByEntity: jest.fn(),
+//   getCustomObjectsByEntity: jest.fn(),
+// };
+//
+// describe('ApacheSparkClient', () => {
+//   let apacheSparkClient: ApacheSparkClient;
+//
+//   beforeEach(() => {
+//     apacheSparkClient = new ApacheSparkClient(mockKubernetesApi);
+//   });
+//
+//   afterEach(() => {
+//     jest.clearAllMocks();
+//   });
+//
+//   it('should fetch Spark application logs', async () => {
+//     mockKubernetesApi.proxy.mockResolvedValue({
+//       ok: true,
+//       text: () => {
+//         return 'logs';
+//       },
+//     });
+//     const logs = await apacheSparkClient.getLogs(
+//       'cluster1',
+//       'spark-namespace',
+//       'spark-pod-name',
+//       'abc',
+//     );
+//     expect(logs).toEqual('logs');
+//     expect(mockKubernetesApi.proxy).toHaveBeenCalledWith({
+//       clusterName: 'cluster1',
+//       path: '/api/v1/namespaces/spark-namespace/pods/spark-pod-name/log?tailLines=1000&container=abc',
+//     });
+//   });
+//
+//   it('should throw error if Spark application logs are not
fetched', async () => {
+//     mockKubernetesApi.proxy.mockResolvedValueOnce({
+//       status: 500,
+//       statusText: 'Internal Server Error',
+//       ok: false,
+//       text: () => {
+//         return 'oh noes';
+//       },
+//     });
+//
+//     await expect(
+//       apacheSparkClient.getLogs(
+//         'spark-app-name',
+//         'spark-namespace',
+//         'spark-pod-name',
+//         'abc',
+//       ),
+//     ).rejects.toEqual(
+//       'failed to fetch logs: 500, Internal Server Error, oh noes',
+//     );
+//   });
+//
+//   // test getSparkApp method
+//   it('should fetch Spark application', async () => {
+//     // @ts-ignore
+//     const mockResponse: ApacheSpark = {
+//       apiVersion: 'sparkoperator.k8s.io/v1beta2',
+//       kind: 'SparkApplication',
+//       metadata: {
+//         name: 'spark-app-name',
+//         namespace: 'spark-namespace',
+//         labels: {
+//           app: 'spark-app-name',
+//         },
+//         creationTimestamp: '2021-01-01T00:00:00Z',
+//       },
+//       spec: {
+//         image: 'abc',
+//         mainApplicationFile: 'main.py',
+//         mode: 'cluster',
+//         sparkVersion: 'v3.1.1.',
+//         type: 'Python',
+//         driver: {
+//           cores: 1,
+//         },
+//         executor: {
+//           cores: 1,
+//         },
+//       },
+//       status: {
+//         applicationState: {
+//           state: 'RUNNING',
+//         },
+//       },
+//     };
+//
+//     mockKubernetesApi.proxy.mockResolvedValue({
+//       ok: true,
+//       text: () => {
+//         return JSON.stringify(mockResponse);
+//       },
+//     });
+//
+//     const application = await apacheSparkClient.getSparkApp(
+//       'spark-app-name',
+//       'spark-namespace',
+//       'abc',
+//     );
+//     expect(application).toEqual(mockResponse);
+//   });
+// });
diff --git a/plugins/apache-spark/src/api/index.ts b/plugins/apache-spark/src/api/index.ts
new file mode 100644
index 0000000..cda9454
--- /dev/null
+++ b/plugins/apache-spark/src/api/index.ts
@@ -0,0 +1,176 @@
+import { createApiRef } from '@backstage/core-plugin-api';
+import { ApacheSpark, ApacheSparkList, Pod } from './model';
+import { KubernetesApi } from '@backstage/plugin-kubernetes';
+
+export const apacheSparkApiRef = createApiRef<ApacheSparkApi>({
+  id: 'plugin.apachespark',
+});
+
+const API_VERSION = 'sparkoperator.k8s.io/v1beta2';
+const SPARK_APP_PLURAL = 'sparkapplications';
+const K8s_API_TIMEOUT = 'timeoutSeconds';
+
+export interface ApacheSparkApi {
+  getSparkApps(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    labels: string | undefined,
+  ): Promise<ApacheSparkList>;
+
+  getSparkApp(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    name: string,
+  ): Promise<ApacheSpark>;
+
+  getLogs(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+    containerName?: string | undefined,
+    tailLine?: number,
+  ): Promise<string>;
+
+  getContainers(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+  ): Promise<string[]>;
+}
+
+export class ApacheSparkClient implements ApacheSparkApi {
+  private kubernetesApi: KubernetesApi;
+  constructor(kubernetesApi: KubernetesApi) {
+    this.kubernetesApi = kubernetesApi;
+  }
+  async getSparkApps(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    labels: string | undefined,
+  ): Promise<ApacheSparkList> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}`;
+    const query = new URLSearchParams({
+      [K8s_API_TIMEOUT]: '30',
+    });
+    if (labels) {
+      query.set('labelSelector', labels);
+    }
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ?
clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch resources: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const out = JSON.parse(await resp.text());
+    this.removeManagedField(out);
+    return out;
+  }
+
+  async getSparkApp(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    name: string,
+  ): Promise<ApacheSpark> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}/${name}`;
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}`,
+    });
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch resources: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const out = JSON.parse(await resp.text());
+    this.removeManagedField(out);
+    return out;
+  }
+
+  async getLogs(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+    containerName: string | undefined,
+    tailLine: number = 1000,
+  ): Promise<string> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/api/v1/namespaces/${ns}/pods/${podName}/log`;
+    const query = new URLSearchParams({
+      tailLines: tailLine.toString(),
+    });
+    if (containerName) {
+      query.set('container', containerName);
+    }
+
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch logs: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    return resp.text();
+  }
+
+  async getContainers(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+  ): Promise<string[]> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/api/v1/namespaces/${ns}/pods/${podName}`;
+    const query = new URLSearchParams({
+      [K8s_API_TIMEOUT]: '30',
+    });
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ?
clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+    if (!resp.ok) {
+      throw new Error(
+        `failed to fetch logs: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const pod = JSON.parse(await resp.text()) as Pod;
+    return pod.spec.containers.map(c => c.name);
+  }
+
+  async getFirstCluster(): Promise<string> {
+    const clusters = await this.kubernetesApi.getClusters();
+    if (clusters.length > 0) {
+      return Promise.resolve(clusters[0].name);
+    }
+    return Promise.reject('no clusters found in configuration');
+  }
+
+  removeManagedField(spark: any) {
+    if (spark.metadata?.hasOwnProperty('managedFields')) {
+      delete spark.metadata.managedFields;
+    }
+    if (spark.items) {
+      for (const i of spark.items) {
+        this.removeManagedField(i);
+      }
+    }
+  }
+}
diff --git a/plugins/apache-spark/src/api/model.ts b/plugins/apache-spark/src/api/model.ts
new file mode 100644
index 0000000..1d6455c
--- /dev/null
+++ b/plugins/apache-spark/src/api/model.ts
@@ -0,0 +1,100 @@
+export type Metadata = {
+  name: string;
+  namespace?: string;
+  labels?: Record<string, string>;
+  annotations?: Record<string, string>;
+  creationTimestamp: string;
+  managedFields?: any;
+};
+
+export type Spec = {
+  arguments?: string[];
+  batchScheduler?: string;
+  driver: {
+    coreLimit?: string;
+    coreRequest?: string;
+    cores?: number;
+    gpu?: {
+      name: string;
+      quantity: number;
+    };
+    labels?: Record<string, string>;
+    memory?: string;
+    memoryOverhead?: string;
+    podName?: string;
+    schedulerName?: string;
+    serviceAccount?: string;
+  };
+  executor: {
+    coreLimit?: string;
+    coreRequest?: string;
+    cores?: number;
+    gpu?: {
+      name: string;
+      quantity: number;
+    };
+    instances?: number;
+    labels?: Record<string, string>;
+    memory?: string;
+    memoryOverhead?: string;
+    schedulerName?: string;
+    serviceAccount?: string;
+  };
+  image: string;
+  mainClass?: string;
+  mainApplicationFile?: string;
+  mode: string;
+  pythonVersion?: string;
+  sparkVersion: string;
+  type: string;
+};
+
+export type Status = {
+  applicationState: {
+    errorMessage?: string;
+    state: string;
+  };
+  driverInfo?: {
+    podName: string;
+    webUIAddress: string;
+    webUIIngressAddress: string;
+    webUIIngressName: string;
+    webUIPort: string;
+    webUIServiceName: string;
+  };
+  executionAttempts?: number;
+  executorState?: { [key: string]: string };
+  lastSubmissionAttemptTime?: string;
+  sparkApplicationId?: string;
+  submissionAttempts?: number;
+  submissionID?: string;
+  terminationTime?: string;
+};
+
+export type ApacheSpark = {
+  apiVersion: string;
+  kind: string;
+  metadata: Metadata;
+  spec: Spec;
+  status: Status;
+};
+
+export type ApacheSparkList = {
+  apiVersion: string;
+  kind: string;
+  items?: ApacheSpark[];
+};
+
+export type Pod = {
+  apiVersion: string;
+  kind: string;
+  metadata: Metadata;
+  spec: PodSpec;
+};
+
+export type PodSpec = {
+  containers: {
+    image: string;
+    name: string;
+  }[];
+};
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
new file mode 100644
index 0000000..01d3ade
--- /dev/null
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
@@ -0,0 +1,83 @@
+import React from 'react';
+import { render, screen } from '@testing-library/react';
+import { useApi } from '@backstage/core-plugin-api';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import useAsync from 'react-use/lib/useAsync';
+import { ApacheSpark } from '../../api/model';
+import { ApacheSparkDriverLogs } from
'./ApacheSparkLogs';
+import {
+  APACHE_SPARK_LABEL_SELECTOR_ANNOTATION,
+  CLUSTER_NAME_ANNOTATION,
+  K8S_NAMESPACE_ANNOTATION,
+} from '../../consts';
+
+jest.mock('@backstage/core-plugin-api');
+jest.mock('react-use/lib/useAsync');
+jest.mock('@backstage/plugin-catalog-react');
+
+jest.mock('@backstage/core-components', () => ({
+  LogViewer: (props: { text: string }) => {
+    return <div>{props.text}</div>;
+  },
+}));
+
+describe('ApacheSparkDriverLogs', () => {
+  const mockUseApi = useApi as jest.MockedFunction<typeof useApi>;
+  const mockUseAsync = useAsync as jest.MockedFunction<typeof useAsync>;
+  const mockUseEntity = useEntity as jest.MockedFunction<typeof useEntity>;
+  const mockGetLogs = jest.fn();
+  const mockSparkApp = {
+    status: {
+      driverInfo: {
+        podName: 'test-pod',
+      },
+    },
+  } as ApacheSpark;
+
+  beforeEach(() => {
+    mockUseApi.mockReturnValue({
+      getLogs: mockGetLogs,
+    });
+    mockUseEntity.mockReturnValue({
+      entity: {
+        apiVersion: 'version',
+        kind: 'kind',
+        metadata: {
+          name: 'name',
+          namespace: 'ns1',
+          annotations: {
+            [K8S_NAMESPACE_ANNOTATION]: 'k8s-ns',
+            [CLUSTER_NAME_ANNOTATION]: 'my-cluster',
+            [APACHE_SPARK_LABEL_SELECTOR_ANNOTATION]: 'env=test',
+          },
+        },
+      },
+    });
+  });
+
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should render error message if there is an error', () => {
+    mockUseAsync.mockReturnValue({
+      value: undefined,
+      loading: false,
+      error: new Error('Test error'),
+    });
+
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('Error: Test error')).toBeInTheDocument();
+    expect(screen.getByRole('alert')).toBeInTheDocument();
+  });
+
+  it('should render the log viewer with the fetched logs', async () => {
+    mockUseAsync.mockReturnValue({
+      value: 'test logs',
+      loading: false,
+      error: undefined,
+    });
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('test logs')).toBeInTheDocument();
+  });
+});
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
new file mode 100644
index 0000000..e892856
--- /dev/null
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
@@ -0,0 +1,100 @@
+import { useApi } from '@backstage/core-plugin-api';
+import { apacheSparkApiRef } from '../../api';
+import useAsync from 'react-use/lib/useAsync';
+import { ApacheSpark } from '../../api/model';
+import {
+  LogViewer,
+  Progress,
+  Select,
+  SelectedItems,
+  SelectItem,
+} from '@backstage/core-components';
+import Alert from '@material-ui/lab/Alert';
+import React, { useEffect, useState } from 'react';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import { getAnnotationValues } from '../utils';
+
+export const ApacheSparkDriverLogs = (props: { sparkApp: ApacheSpark }) => {
+  const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
+  const { ns, clusterName } = getAnnotationValues(entity);
+
+  const { value, loading, error } = useAsync(async (): Promise<string> => {
+    return await apiClient.getLogs(
+      clusterName,
+      ns,
+      props.sparkApp.status.driverInfo?.podName!,
+      'spark-kubernetes-driver',
+    );
+  }, [props]);
+  if (loading) {
+    return <Progress />;
+  } else if (error) {
+    return <Alert severity="error">{`${error}`}</Alert>;
+  }
+  return <LogViewer text={value ?? ''} />;
+};
+
+const ExecutorLogs = (props: { name: string }) => {
+  const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
+  const [logs, setLogs] = useState('');
+  const { ns, clusterName } = getAnnotationValues(entity);
+
+  useEffect(() => {
+    async function getLogs() {
+      try {
+        const val = await apiClient.getLogs(
+          clusterName,
+          ns,
+          props.name,
+          'spark-kubernetes-executor',
+        );
+        setLogs(val);
+      } catch (e) {
+        if (typeof e === 'string') {
+          setLogs(e);
+        }
+      }
+    }
+    if (props.name !== '') {
+      getLogs();
+    }
+  }, [apiClient, clusterName, ns, props]);
+
+  return <LogViewer text={logs} />;
+};
+
+export const ApacheSparkExecutorLogs = (props: { sparkApp: ApacheSpark }) => {
+  const [selected, setSelected] = useState('');
+  if (props.sparkApp.status.applicationState.state !==
'RUNNING') {
+    return (
+      <Alert severity="info">
+        Executor logs are only available for Spark Applications in RUNNING state
+      </Alert>
+    );
+  }
+  const executors: SelectItem[] = [{ label: '', value: '' }];
+  for (const key in props.sparkApp.status.executorState) {
+    if (props.sparkApp.status.executorState.hasOwnProperty(key)) {
+      executors.push({ label: key, value: key });
+    }
+  }
+
+  const handleChange = (item: SelectedItems) => {
+    if (typeof item === 'string' && item !== '') {
+      setSelected(item);
+    }
+  };
+  return (
+    <>
+
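Because both the PR workflow and the Dockerfile above install with `--frozen-lockfile`, a dependency sweep like this one only lands cleanly once `yarn.lock` has been regenerated and committed. A rough local verification sketch, assuming Node 18 and yarn classic; the `backstage` image tag is illustrative, and the exact flags of the `cnoe` wrapper depend on `cnoe-wrapper.sh`, which this diff does not show:

```bash
# Re-resolve the changed @backstage/* ranges, then mirror the PR job
# (pr.yaml): a frozen install plus a type check must both succeed.
yarn install --network-timeout 600000
yarn install --frozen-lockfile --network-timeout 600000
yarn tsc

# Smoke-test the utility binaries baked into the runtime image.
docker run --rm --entrypoint kubectl backstage version --client
docker run --rm --entrypoint cnoe-cli backstage --help
```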