diff --git a/.dockerignore b/.dockerignore index 05edb62..cce8f97 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,8 +1,6 @@ -.git -.yarn/cache -.yarn/install-state.gz +dist-types node_modules -packages/*/src +packages/*/dist packages/*/node_modules -plugins -*.local.yaml +plugins/*/dist +plugins/*/node_modules diff --git a/.github/workflows/build-and-push.yaml b/.github/workflows/build-and-push.yaml new file mode 100644 index 0000000..9bb12b8 --- /dev/null +++ b/.github/workflows/build-and-push.yaml @@ -0,0 +1,51 @@ +name: ci + +on: push + +jobs: + build: + runs-on: ubuntu-22.04 + + steps: + - + name: Repository meta + id: repository + run: | + registry=${{ github.server_url }} + registry=${registry##http*://} + echo "registry=${registry}" >> "$GITHUB_OUTPUT" + echo "registry=${registry}" + repository="$(echo "${{ github.repository }}" | tr '[:upper:]' '[:lower:]')" + echo "repository=${repository}" >> "$GITHUB_OUTPUT" + echo "repository=${repository}" + - + name: Docker meta + uses: docker/metadata-action@v5 + id: docker + with: + images: ${{ steps.repository.outputs.registry }}/${{ steps.repository.outputs.repository }} + - + name: Login to registry + uses: docker/login-action@v3 + with: + registry: ${{ steps.repository.outputs.registry }} + username: ${{ secrets.PACKAGES_USER }} + password: ${{ secrets.PACKAGES_TOKEN }} + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + buildkitd-flags: '--allow-insecure-entitlement network.host' + driver-opts: network=host + - + name: Build and push + uses: docker/build-push-action@v6 + with: + push: true + allow: network.host + network: host + platforms: linux/amd64,linux/arm64 + tags: ${{ steps.docker.outputs.tags }} diff --git a/.yarnrc.yml b/.yarnrc.yml new file mode 100644 index 0000000..3186f3f --- /dev/null +++ b/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: node-modules diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 
0000000..6d4d598 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,94 @@ +# Stage 1 - Create yarn install skeleton layer +FROM node:20.18.1 AS packages + +WORKDIR /app +COPY package.json yarn.lock ./ + +COPY packages packages + +# Comment this out if you don't have any internal plugins +COPY plugins plugins + +RUN find packages \! -name "package.json" -mindepth 2 -maxdepth 2 -exec rm -rf {} \+ + +# Stage 2 - Install dependencies and build packages +FROM node:20.18.1 AS build + +# Required for arm64 +RUN apt update -y +RUN apt install -y python3 make gcc build-essential bash + +USER node +WORKDIR /app + +COPY --from=packages --chown=node:node /app . + +RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \ + yarn install --network-timeout 600000 + +COPY --chown=node:node . . + +RUN yarn tsc +RUN yarn --cwd packages/backend build +# If you have not yet migrated to package roles, use the following command instead: +# RUN yarn --cwd packages/backend backstage-cli backend:bundle --build-dependencies + +RUN mkdir packages/backend/dist/skeleton packages/backend/dist/bundle \ + && tar xzf packages/backend/dist/skeleton.tar.gz -C packages/backend/dist/skeleton \ + && tar xzf packages/backend/dist/bundle.tar.gz -C packages/backend/dist/bundle + +# Stage 3 - Build the actual backend image and install production dependencies +FROM node:20.18.1 + +# Install isolate-vm dependencies, these are needed by the @backstage/plugin-scaffolder-backend. +# Install packages needed to get utility binaries +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ + --mount=type=cache,target=/var/lib/apt,sharing=locked \ + apt-get update && \ + apt-get install -y --no-install-recommends python3 python3-pip python3-venv g++ build-essential ca-certificates curl + +RUN yarn config set python /usr/bin/python3 + +# Add kubectl for the kube apply plugin. +# Add mkdocs for the TechDocs plugin. 
+RUN if test "$(uname -m)" = "x86_64"; \ + then \ + curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/amd64/kubectl; \ + fi +RUN if test "$(uname -m)" != "x86_64"; \ + then \ + curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/arm64/kubectl; \ + fi +RUN chmod +x /usr/local/bin/kubectl + +ENV VIRTUAL_ENV=/opt/venv +RUN python3 -m venv $VIRTUAL_ENV +ENV PATH="$VIRTUAL_ENV/bin:$PATH" +RUN pip3 install 'mkdocs-techdocs-core==1.4.2' 'mkdocs-awesome-pages-plugin==2.10.1' + +# From here on we use the least-privileged `node` user to run the backend. +USER node + +# This should create the app dir as `node`. +# If it is instead created as `root` then the `tar` command below will +# fail: `can't create directory 'packages/': Permission denied`. +# If this occurs, then ensure BuildKit is enabled (`DOCKER_BUILDKIT=1`) +# so the app dir is correctly created as `node`. +WORKDIR /app + +# Copy the install dependencies from the build stage and context +COPY --from=build --chown=node:node /app/yarn.lock /app/package.json /app/packages/backend/dist/skeleton/ ./ + +RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \ + yarn install --production --network-timeout 600000 + +# Copy the built packages from the build stage +COPY --from=build --chown=node:node /app/packages/backend/dist/bundle/ ./ + +# Copy any other files that we need at runtime +COPY --chown=node:node app-config.yaml ./ + +# This switches many Node.js dependencies to production mode. +ENV NODE_ENV production + +CMD ["node", "packages/backend", "--config", "app-config.yaml"] diff --git a/README.md b/README.md index 8c7c437..61707f6 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,116 @@ -# [Backstage](https://backstage.io) +# EDP Backstage -This is your newly scaffolded Backstage App, Good Luck! +The EDP bespoke version of backstage. 
-To start the app, run: +With respect to the CNOE stack (where EDP originates from) it is comparable to https://github.com/cnoe-io/backstage-app -```sh -yarn install -yarn dev +At the time of writing CNOE-backstage-app is "version": "1.28.4" + +## Container Images + +Container images are pushed to the Cefor Container Registry and available [here](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW-CICD/-/packages/container/backstage-edp/). + + +## Local Development + +Use of [**edpbuilder**](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW/edpbuilder.git) is recommended for local setup. + +### Create your local cluster + +Once edpbuilder is installed on your computer, create a stack that you are interested in. For example: + +> Hint: From here on this is the old CNOE README .... no guarantee that this works as described! + +### Update Backstage application config + +Once all ArgoCD applications are healthy, you need to update a few fields in the [app-config.yaml](./app-config.yaml) file. + +#### Update control plane URL + +The control plane port must be updated every time a cluster is created. Run the `kubectl cluster-info` command to get the control plane URL. Once you have your URL, update your `app-config.yaml` file at [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L122). + +For example: + +```bash +$ kubectl cluster-info + +Kubernetes control plane is running at https://127.0.0.1:36463 +CoreDNS is running at https://127.0.0.1:36463/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy +``` + +For this particular example output, the `https://127.0.0.1:36463` above is the URL you need to use in your `app-config.yaml`. + +#### Update service account token + +Since tokens are generated each time the backstage service account is created, you need to update this value as well. 
The command to retrieve the service account token is: + +`kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token` + +Copy the token value and update the app-config file at [this line](https://github.com/cnoe-io/backstage-app/blob/main/app-config.yaml#L127). + +For example: + +```bash +$ kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token + +eyJhbGciOiJSUzI1NiIsImtpZCI6IkRxbDRCSnNicjFwekFqdmxwNDc5MHJqeUlFSjhxNHU0LV95OC1s... +``` + +If you do not want to place the token value in your file, you can use environment variables instead: +1. Set [this line](https://github.com/cnoe-io/backstage-app/blob/main/app-config.yaml#L127) value to be `${BACKSTAGE_SA_TOKEN}`. +2. Then export the token value: + ```bash + export BACKSTAGE_SA_TOKEN=$(kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token) + ``` + +#### Update ArgoCD token + +ArgoCD admin passwords are generated on each fresh installation. You need to update the configuration file accordingly. To obtain your password, run: `./idpbuilder get secrets -p argocd`. Then update [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L136) + +For example: + +```bash +$ ./idpbuilder get secrets -p argocd + +--------------------------- +Name: argocd-initial-admin-secret +Namespace: argocd +Data: + password : abc + username : admin +``` + +#### Update Gitea Credentials + +Gitea admin passwords are generated on each fresh installation as well. To obtain your password, run: `./idpbuilder get secrets -p gitea`. +Then update [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L40) and [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L44). 
+ +For example: + +```bash +$ ./idpbuilder get secrets -p gitea + +--------------------------- +Name: gitea-credential +Namespace: gitea +Data: + password : abc + username : giteaAdmin +``` + +### Start Backstage processes + +Once the `app-config.yaml` file is updated, you are ready to start your backstage instance. For development purposes, using two terminal windows or tabs is recommended. You can also run them through your favorite IDE. + +In the first terminal tab, install dependencies and start the backend. + +```bash +yarn install +yarn run start-backend +``` + +In the second terminal tab, run the frontend. + +```bash +yarn run start ``` diff --git a/app-config.yaml b/app-config.yaml index e338155..0006a70 100644 --- a/app-config.yaml +++ b/app-config.yaml @@ -31,17 +31,16 @@ backend: client: better-sqlite3 connection: ':memory:' # workingDirectory: /tmp # Use this to configure a working directory for the scaffolder, defaults to the OS temp-dir - integrations: gitea: - - baseUrl: https://gitea.cnoe.localtest.me:8443 - host: gitea.cnoe.localtest.me:8443 + - baseUrl: https://cnoe.localtest.me:8443/gitea + host: cnoe.localtest.me:8443 username: giteaAdmin - password: giteaPassword - - baseUrl: https://gitea.cnoe.localtest.me - host: gitea.cnoe.localtest.me:8443 + password: ${GITEA_PASSWORD} + - baseUrl: https://cnoe.localtest.me/gitea + host: cnoe.localtest.me username: giteaAdmin - password: giteaPassword + password: ${GITEA_PASSWORD} proxy: ### Example for how to add a proxy endpoint for the frontend. @@ -61,19 +60,18 @@ techdocs: runIn: 'docker' # Alternatives - 'local' publisher: type: 'local' # Alternatives - 'googleGcs' or 'awsS3'. Read documentation for using alternatives. 
- auth: # see https://backstage.io/docs/auth/ to learn about auth providers environment: local # set this to development to enable SSO session: secret: abcdfkjalskdfjkla providers: + guest: {} keycloak-oidc: development: - metadataUrl: https://keycloak.cnoe.localtest.me:8443/realms/cnoe/.well-known/openid-configuration + metadataUrl: https://cnoe.localtest.me:8443/keycloak/realms/cnoe/.well-known/openid-configuration clientId: backstage clientSecret: ${KEYCLOAK_CLIENT_SECRET} - scope: 'openid profile email groups' prompt: auto scaffolder: @@ -90,8 +88,7 @@ catalog: - allow: [ Component, System, API, Resource, Location, Template ] locations: - type: url - target: https://gitea.cnoe.localtest.me:8443/giteaAdmin/entities/src/branch/main/catalog-info.yaml - + target: https://cnoe.localtest.me:8443/gitea/giteaAdmin/idpbuilder-localdev-backstage-templates-entities/src/branch/main/catalog-info.yaml # # Local example template # - type: file # target: ../../examples/template/template.yaml @@ -129,10 +126,10 @@ argocd: appLocatorMethods: - type: 'config' instances: - - name: in-cluster - url: https://argocd.cnoe.localtest.me:8443 + - name: local + url: https://cnoe.localtest.me:8443/argocd username: admin # replace with your argocd password e.g. 
kubectl -n argocd get secret argocd-initial-admin-secret -o jsonpath="{.data.password}" | base64 -d password: ${ARGOCD_ADMIN_PASSWORD} argoWorkflows: - baseUrl: https://argo.cnoe.localtest.me:8443 + baseUrl: https://cnoe.localtest.me:8443/argo-workflows \ No newline at end of file diff --git a/backstage.json b/backstage.json index 6590048..c6aea75 100644 --- a/backstage.json +++ b/backstage.json @@ -1,3 +1,3 @@ { - "version": "1.22.1" + "version": "1.38.1" } diff --git a/cnoe-wrapper.sh b/cnoe-wrapper.sh new file mode 100644 index 0000000..4d5b490 --- /dev/null +++ b/cnoe-wrapper.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +SERVICE_ACCOUNT_DIR="/var/run/secrets/kubernetes.io/serviceaccount" +KUBERNETES_SERVICE_SCHEME=$(case $KUBERNETES_SERVICE_PORT in 80|8080|8081) echo "http";; *) echo "https"; esac) +KUBERNETES_SERVER_URL="$KUBERNETES_SERVICE_SCHEME"://"$KUBERNETES_SERVICE_HOST":"$KUBERNETES_SERVICE_PORT" +KUBERNETES_CLUSTER_CA_FILE="$SERVICE_ACCOUNT_DIR"/ca.crt +KUBERNETES_NAMESPACE=$(cat "$SERVICE_ACCOUNT_DIR"/namespace) +KUBERNETES_USER_TOKEN=$(cat "$SERVICE_ACCOUNT_DIR"/token) +KUBERNETES_CONTEXT="inCluster" + +rm -rf "$HOME"/.kube +mkdir -p "$HOME"/.kube +cat << EOF > "$HOME"/.kube/config +apiVersion: v1 +kind: Config +preferences: {} +current-context: $KUBERNETES_CONTEXT +clusters: +- cluster: + server: $KUBERNETES_SERVER_URL + certificate-authority: $KUBERNETES_CLUSTER_CA_FILE + name: inCluster +users: +- name: podServiceAccount + user: + token: $KUBERNETES_USER_TOKEN +contexts: +- context: + cluster: inCluster + user: podServiceAccount + namespace: $KUBERNETES_NAMESPACE + name: $KUBERNETES_CONTEXT +EOF + +cnoe-cli "$@" diff --git a/examples/k8s-apply/skeleton/cm.yaml b/examples/k8s-apply/skeleton/cm.yaml new file mode 100644 index 0000000..624feac --- /dev/null +++ b/examples/k8s-apply/skeleton/cm.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: game-demo +data: + # property-like keys; each key maps to a simple value + 
player_initial_lives: "3" + ui_properties_file_name: "user-interface.properties" + + # file-like keys + game.properties: | + enemy.types=aliens,monsters + player.maximum-lives=5 + user-interface.properties: | + color.good=purple + color.bad=yellow + allow.textmode=true diff --git a/examples/k8s-apply/template-manifest-object.yaml b/examples/k8s-apply/template-manifest-object.yaml new file mode 100644 index 0000000..f88963d --- /dev/null +++ b/examples/k8s-apply/template-manifest-object.yaml @@ -0,0 +1,41 @@ +apiVersion: scaffolder.backstage.io/v1beta3 +kind: Template +metadata: + name: deploy-resources-object + title: Deploy Resources using object + description: Deploy Resource to Kubernetes +spec: + owner: guest + type: service + # these are the steps which are rendered in the frontend with the form input + parameters: [] + steps: + - id: template + name: Generating component + action: fetch:template + input: + url: ./skeleton + - id: apply + name: apply-manifest + action: cnoe:kubernetes:apply + input: + namespaced: true + manifestObject: + apiVersion: v1 + kind: ConfigMap + metadata: + name: game-demo + data: + # property-like keys; each key maps to a simple value + player_initial_lives: "3" + ui_properties_file_name: "user-interface.properties" + + # file-like keys + game.properties: | + enemy.types=aliens,monsters + player.maximum-lives=5 + user-interface.properties: | + color.good=purple + color.bad=yellow + allow.textmode=true + clusterName: local diff --git a/examples/k8s-apply/template-manifest-string.yaml b/examples/k8s-apply/template-manifest-string.yaml new file mode 100644 index 0000000..312f557 --- /dev/null +++ b/examples/k8s-apply/template-manifest-string.yaml @@ -0,0 +1,41 @@ +apiVersion: scaffolder.backstage.io/v1beta3 +kind: Template +metadata: + name: deploy-resources-string + title: Deploy Resources using literal string + description: Deploy Resource to Kubernetes +spec: + owner: guest + type: service + # these are the steps which are rendered 
in the frontend with the form input + parameters: [] + steps: + - id: template + name: Generating component + action: fetch:template + input: + url: ./skeleton + - id: apply + name: apply-manifest + action: cnoe:kubernetes:apply + input: + namespaced: true + manifestString: | + apiVersion: v1 + kind: ConfigMap + metadata: + name: game-demo + data: + # property-like keys; each key maps to a simple value + player_initial_lives: "3" + ui_properties_file_name: "user-interface.properties" + + # file-like keys + game.properties: | + enemy.types=aliens,monsters + player.maximum-lives=5 + user-interface.properties: | + color.good=purple + color.bad=yellow + allow.textmode=true + clusterName: local diff --git a/examples/k8s-apply/template.yaml b/examples/k8s-apply/template.yaml new file mode 100644 index 0000000..3f097c3 --- /dev/null +++ b/examples/k8s-apply/template.yaml @@ -0,0 +1,30 @@ +apiVersion: scaffolder.backstage.io/v1beta3 +kind: Template +metadata: + name: deploy-resources + title: Deploy Resources + description: Deploy Resource to Kubernetes +spec: + owner: guest + type: service + # these are the steps which are rendered in the frontend with the form input + parameters: + - title: file name + properties: + path: + type: string + description: file name + default: cm.yaml + steps: + - id: template + name: Generating component + action: fetch:template + input: + url: ./skeleton + - id: apply + name: apply-manifest + action: cnoe:kubernetes:apply + input: + namespaced: true + manifestPath: cm.yaml + clusterName: local diff --git a/package.json b/package.json index 73cfec7..7e09d0f 100644 --- a/package.json +++ b/package.json @@ -31,8 +31,8 @@ ] }, "devDependencies": { - "@backstage/cli": "^0.25.1", - "@backstage/e2e-test-utils": "^0.1.0", + "@backstage/cli": "^0.32.0", + "@backstage/e2e-test-utils": "^0.1.1", "@playwright/test": "^1.32.3", "@spotify/prettier-config": "^12.0.0", "concurrently": "^8.0.0", diff --git a/packages/app/package.json 
b/packages/app/package.json index 37f5c9b..aba7302 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -14,36 +14,33 @@ "lint": "backstage-cli package lint" }, "dependencies": { - "@backstage/app-defaults": "~1.4.7", - "@backstage/catalog-model": "~1.4.3", - "@backstage/cli": "~0.25.1", - "@backstage/core-app-api": "~1.11.3", - "@backstage/core-components": "~0.13.10", - "@backstage/core-plugin-api": "~1.8.2", - "@backstage/integration-react": "~1.1.23", - "@backstage/plugin-api-docs": "~0.10.3", - "@backstage/plugin-catalog": "~1.16.1", - "@backstage/plugin-catalog-common": "~1.0.20", - "@backstage/plugin-catalog-graph": "~0.3.3", - "@backstage/plugin-catalog-import": "~0.10.5", - "@backstage/plugin-catalog-react": "~1.9.3", - "@backstage/plugin-github-actions": "~0.6.10", - "@backstage/plugin-home": "~0.6.1", - "@backstage/plugin-kubernetes": "~0.11.4", - "@backstage/plugin-org": "~0.6.19", - "@backstage/plugin-permission-react": "~0.4.19", - "@backstage/plugin-scaffolder": "~1.17.1", - "@backstage/plugin-search": "~1.4.5", - "@backstage/plugin-search-react": "~1.7.5", - "@backstage/plugin-tech-radar": "~0.6.12", - "@backstage/plugin-techdocs": "~1.9.3", - "@backstage/plugin-techdocs-module-addons-contrib": "~1.1.4", - "@backstage/plugin-techdocs-react": "~1.1.15", - "@backstage/plugin-user-settings": "~0.8.0", - "@backstage/theme": "~0.5.0", - "@internal/plugin-apache-spark": "^0.1.0", - "@internal/plugin-argo-workflows": "^0.1.0", - "@internal/plugin-cnoe-ui": "^0.1.0", + "@backstage-community/plugin-github-actions": "^0.6.16", + "@backstage-community/plugin-tech-radar": "^0.7.4", + "@backstage/app-defaults": "^1.6.1", + "@backstage/catalog-model": "^1.7.3", + "@backstage/cli": "^0.32.0", + "@backstage/core-app-api": "^1.16.1", + "@backstage/core-components": "^0.17.1", + "@backstage/core-plugin-api": "^1.10.6", + "@backstage/integration-react": "^1.2.6", + "@backstage/plugin-api-docs": "^0.12.6", + "@backstage/plugin-catalog": "^1.29.0", 
+ "@backstage/plugin-catalog-common": "^1.1.3", + "@backstage/plugin-catalog-graph": "^0.4.18", + "@backstage/plugin-catalog-import": "^0.12.13", + "@backstage/plugin-catalog-react": "^1.17.0", + "@backstage/plugin-home": "^0.8.7", + "@backstage/plugin-kubernetes": "^0.12.6", + "@backstage/plugin-org": "^0.6.38", + "@backstage/plugin-permission-react": "^0.4.33", + "@backstage/plugin-scaffolder": "^1.30.1", + "@backstage/plugin-search": "^1.4.25", + "@backstage/plugin-search-react": "^1.8.8", + "@backstage/plugin-techdocs": "^1.12.5", + "@backstage/plugin-techdocs-module-addons-contrib": "^1.1.23", + "@backstage/plugin-techdocs-react": "^1.2.16", + "@backstage/plugin-user-settings": "^0.8.21", + "@backstage/theme": "^0.6.5", "@material-ui/core": "^4.12.2", "@material-ui/icons": "^4.9.1", "@roadiehq/backstage-plugin-argo-cd": "^2.5.1", @@ -55,7 +52,7 @@ "react-use": "^17.2.4" }, "devDependencies": { - "@backstage/test-utils": "^1.4.7", + "@backstage/test-utils": "^1.7.7", "@playwright/test": "^1.32.3", "@testing-library/dom": "^9.0.0", "@testing-library/jest-dom": "^6.0.0", diff --git a/packages/app/src/App.tsx b/packages/app/src/App.tsx index 87faf6d..74fc096 100644 --- a/packages/app/src/App.tsx +++ b/packages/app/src/App.tsx @@ -13,7 +13,7 @@ import { import { ScaffolderPage, scaffolderPlugin } from '@backstage/plugin-scaffolder'; import { orgPlugin } from '@backstage/plugin-org'; import { SearchPage } from '@backstage/plugin-search'; -import { TechRadarPage } from '@backstage/plugin-tech-radar'; +import { TechRadarPage } from '@backstage-community/plugin-tech-radar'; import { TechDocsIndexPage, techdocsPlugin, @@ -32,18 +32,8 @@ import { createApp } from '@backstage/app-defaults'; import { AppRouter, FlatRoutes } from '@backstage/core-app-api'; import { CatalogGraphPage } from '@backstage/plugin-catalog-graph'; import { RequirePermission } from '@backstage/plugin-permission-react'; -import { ThemeProvider } from '@material-ui/core/styles'; -import CssBaseline 
from '@material-ui/core/CssBaseline'; import { catalogEntityCreatePermission } from '@backstage/plugin-catalog-common/alpha'; -import LightIcon from '@material-ui/icons/WbSunny'; -import { - CNOEHomepage, - cnoeLightTheme, - cnoeDarkTheme, -} from '@internal/plugin-cnoe-ui'; import {configApiRef, useApi} from "@backstage/core-plugin-api"; -import { ArgoWorkflowsPage } from '@internal/plugin-argo-workflows'; -import { ApacheSparkPage } from '@internal/plugin-apache-spark'; const app = createApp({ apis, @@ -66,7 +56,6 @@ const app = createApp({ ); }, }, - bindRoutes({ bind }) { bind(catalogPlugin.externalRoutes, { createComponent: scaffolderPlugin.routes.root, @@ -83,37 +72,12 @@ const app = createApp({ bind(orgPlugin.externalRoutes, { catalogIndex: catalogPlugin.routes.catalogIndex, }); - }, - themes: [ - { - id: 'cnoe-light-theme', - title: 'Light Theme', - variant: 'light', - icon: , - Provider: ({ children }) => ( - - {children} - - ), - }, - { - id: 'cnoe-dark-theme', - title: 'Dark Theme', - variant: 'dark', - icon: , - Provider: ({ children }) => ( - - {children} - - ), - }, - ], + } }); const routes = ( - } /> - } /> + } /> } /> } /> } /> - } /> - } /> ); diff --git a/packages/app/src/apis.ts b/packages/app/src/apis.ts index 8d9a4ef..68db2f8 100644 --- a/packages/app/src/apis.ts +++ b/packages/app/src/apis.ts @@ -13,7 +13,7 @@ import {OAuth2} from "@backstage/core-app-api"; export const keycloakOIDCAuthApiRef: ApiRef< OpenIdConnectApi & ProfileInfoApi & BackstageIdentityApi & SessionApi > = createApiRef({ - id: 'auth.keycloak-oidc-provider', + id: 'auth.keycloak-oidc', }); export const apis: AnyApiFactory[] = [ createApiFactory({ diff --git a/packages/app/src/components/Root/Root.tsx b/packages/app/src/components/Root/Root.tsx index 6768b48..51a4e86 100644 --- a/packages/app/src/components/Root/Root.tsx +++ b/packages/app/src/components/Root/Root.tsx @@ -5,8 +5,6 @@ import ExtensionIcon from '@material-ui/icons/Extension'; import MapIcon from 
'@material-ui/icons/MyLocation'; import LibraryBooks from '@material-ui/icons/LibraryBooks'; import CreateComponentIcon from '@material-ui/icons/AddCircleOutline'; -import LogoFull from './LogoFull'; -import LogoIcon from './LogoIcon'; import { Settings as SidebarSettings, UserSettingsSignInAvatar, @@ -21,7 +19,6 @@ import { SidebarPage, SidebarScrollWrapper, SidebarSpace, - useSidebarOpenState, Link, } from '@backstage/core-components'; import MenuIcon from '@material-ui/icons/Menu'; @@ -44,12 +41,10 @@ const useSidebarLogoStyles = makeStyles({ const SidebarLogo = () => { const classes = useSidebarLogoStyles(); - const { isOpen } = useSidebarOpenState(); return (
- {isOpen ? : }
); diff --git a/packages/app/src/components/catalog/EntityPage.tsx b/packages/app/src/components/catalog/EntityPage.tsx index 30e5db9..a56185c 100644 --- a/packages/app/src/components/catalog/EntityPage.tsx +++ b/packages/app/src/components/catalog/EntityPage.tsx @@ -10,11 +10,8 @@ import { } from '@backstage/plugin-api-docs'; import { EntityAboutCard, - EntityDependsOnComponentsCard, - EntityDependsOnResourcesCard, EntityHasComponentsCard, EntityHasResourcesCard, - EntityHasSubcomponentsCard, EntityHasSystemsCard, EntityLayout, EntityLinksCard, @@ -28,10 +25,6 @@ import { hasRelationWarnings, EntityRelationWarning, } from '@backstage/plugin-catalog'; -import { - isGithubActionsAvailable, - EntityGithubActionsContent, -} from '@backstage/plugin-github-actions'; import { EntityUserProfileCard, EntityGroupProfileCard, @@ -58,19 +51,13 @@ import { import { TechDocsAddons } from '@backstage/plugin-techdocs-react'; import { ReportIssue } from '@backstage/plugin-techdocs-module-addons-contrib'; -import { EntityKubernetesContent } from '@backstage/plugin-kubernetes'; +import { EntityKubernetesContent, isKubernetesAvailable } from '@backstage/plugin-kubernetes'; import { EntityArgoCDOverviewCard, isArgocdAvailable } from '@roadiehq/backstage-plugin-argo-cd'; -import { - EntityArgoWorkflowsOverviewCard, EntityArgoWorkflowsTemplateOverviewCard, - isArgoWorkflowsAvailable, -} from '@internal/plugin-argo-workflows'; -import {ApacheSparkPage, isApacheSparkAvailable} from "@internal/plugin-apache-spark"; - const techdocsContent = ( @@ -80,13 +67,7 @@ const techdocsContent = ( ); const cicdContent = ( - // This is an example of how you can implement your company's logic in entity page. 
- // You can for example enforce that all components of type 'service' should use GitHubActions - - - - - - isArgoWorkflowsAvailable(e)}> - - - - - - - - - - - - - ); @@ -181,14 +147,10 @@ const serviceEntityPage = ( {cicdContent} - + isKubernetesAvailable(e)}> - - - - @@ -200,17 +162,6 @@ const serviceEntityPage = ( - - - - - - - - - - - {techdocsContent} @@ -227,17 +178,6 @@ const websiteEntityPage = ( {cicdContent} - - - - - - - - - - - {techdocsContent} @@ -288,9 +228,6 @@ const apiPage = ( - - - diff --git a/packages/backend/package.json b/packages/backend/package.json index 88ed161..1593c24 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -16,48 +16,57 @@ "build-image": "docker build ../.. -f Dockerfile --tag backstage" }, "dependencies": { - "@backstage/backend-common": "~0.20.1", - "@backstage/backend-tasks": "~0.5.14", - "@backstage/catalog-client": "~1.5.2", - "@backstage/catalog-model": "~1.4.3", - "@backstage/config": "~1.1.1", - "@backstage/errors": "~1.2.3", - "@backstage/integration": "^1.9.0", - "@backstage/plugin-app-backend": "~0.3.57", - "@backstage/plugin-auth-backend": "~0.20.3", - "@backstage/plugin-auth-node": "~0.4.3", - "@backstage/plugin-catalog-backend": "~1.16.1", - "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "~0.1.6", - "@backstage/plugin-kubernetes-backend": "~0.14.1", - "@backstage/plugin-permission-common": "~0.7.12", - "@backstage/plugin-permission-node": "~0.7.20", - "@backstage/plugin-proxy-backend": "~0.4.7", - "@backstage/plugin-scaffolder-backend": "~1.20.0", - "@backstage/plugin-scaffolder-node": "~0.2.9", - "@backstage/plugin-search-backend": "~1.4.9", - "@backstage/plugin-search-backend-module-catalog": "~0.1.13", - "@backstage/plugin-search-backend-module-pg": "~0.5.18", - "@backstage/plugin-search-backend-module-techdocs": "~0.1.13", - "@backstage/plugin-search-backend-node": "~1.2.13", - "@backstage/plugin-techdocs-backend": "~1.9.2", - "@backstage/types": "~1.1.1", 
- "@jessesanford/plugin-scaffolder-actions": "^0.1.0", - "@roadiehq/backstage-plugin-argo-cd-backend": "~2.14.0", - "@roadiehq/scaffolder-backend-module-utils": "~1.13.1", + "@backstage/backend-common": "^0.25.0", + "@backstage/backend-defaults": "^0.9.0", + "@backstage/backend-plugin-api": "^1.3.0", + "@backstage/backend-tasks": "^0.6.1", + "@backstage/catalog-client": "^1.9.1", + "@backstage/catalog-model": "^1.7.3", + "@backstage/config": "^1.3.2", + "@backstage/errors": "^1.2.7", + "@backstage/integration": "^1.16.3", + "@backstage/plugin-app-backend": "^0.5.1", + "@backstage/plugin-auth-backend": "^0.24.5", + "@backstage/plugin-auth-backend-module-guest-provider": "^0.2.7", + "@backstage/plugin-auth-backend-module-oidc-provider": "^0.4.2", + "@backstage/plugin-auth-node": "^0.6.2", + "@backstage/plugin-catalog-backend": "^1.32.1", + "@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "^0.2.7", + "@backstage/plugin-kubernetes-backend": "^0.19.5", + "@backstage/plugin-permission-common": "^0.8.4", + "@backstage/plugin-permission-node": "^0.9.1", + "@backstage/plugin-proxy-backend": "^0.6.1", + "@backstage/plugin-scaffolder-backend": "^1.32.1", + "@backstage/plugin-scaffolder-backend-module-gitea": "^0.2.8", + "@backstage/plugin-scaffolder-backend-module-github": "^0.7.0", + "@backstage/plugin-scaffolder-node": "^0.8.1", + "@backstage/plugin-search-backend": "^2.0.1", + "@backstage/plugin-search-backend-module-catalog": "^0.3.3", + "@backstage/plugin-search-backend-module-pg": "^0.5.43", + "@backstage/plugin-search-backend-module-techdocs": "^0.4.1", + "@backstage/plugin-search-backend-node": "^1.3.10", + "@backstage/plugin-techdocs-backend": "^2.0.1", + "@backstage/types": "^1.2.1", + "@kubernetes/client-node": "~0.20.0", + "@roadiehq/backstage-plugin-argo-cd-backend": "3.1.0", + "@roadiehq/scaffolder-backend-module-http-request": "^4.3.5", + "@roadiehq/scaffolder-backend-module-utils": "3.0.0", "app": "link:../app", "better-sqlite3": "^9.0.0", 
"dockerode": "^3.3.1", "express": "^4.17.1", "express-promise-router": "^4.1.0", + "fs-extra": "~11.2.0", "node-gyp": "^9.0.0", "pg": "^8.11.3", "winston": "^3.2.1" }, "devDependencies": { - "@backstage/cli": "^0.25.1", + "@backstage/cli": "^0.32.0", "@types/dockerode": "^3.3.0", "@types/express": "^4.17.6", "@types/express-serve-static-core": "^4.17.5", + "@types/fs-extra": "^11.0.4", "@types/luxon": "^2.0.4" }, "files": [ diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index d81d859..5fc9d6b 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -1,123 +1,45 @@ -/* - * Hi! - * - * Note that this is an EXAMPLE Backstage backend. Please check the README. - * - * Happy hacking! - */ +import { createBackend } from '@backstage/backend-defaults'; +import { cnoeScaffolderActions } from './plugins/scaffolder'; +import { authModuleKeycloakOIDCProvider } from './plugins/auth'; -import Router from 'express-promise-router'; -import { - createServiceBuilder, - loadBackendConfig, - getRootLogger, - useHotMemoize, - notFoundHandler, - CacheManager, - DatabaseManager, - HostDiscovery, - UrlReaders, - ServerTokenManager, -} from '@backstage/backend-common'; -import { TaskScheduler } from '@backstage/backend-tasks'; -import { Config } from '@backstage/config'; -import app from './plugins/app'; -import auth from './plugins/auth'; -import catalog from './plugins/catalog'; -import scaffolder from './plugins/scaffolder'; -import proxy from './plugins/proxy'; -import techdocs from './plugins/techdocs'; -import search from './plugins/search'; -import { PluginEnvironment } from './types'; -import { ServerPermissionClient } from '@backstage/plugin-permission-node'; -import { DefaultIdentityClient } from '@backstage/plugin-auth-node'; +const backend = createBackend(); -import kubernetes from './plugins/kubernetes'; -import argocd from './plugins/argocd'; -function makeCreateEnv(config: Config) { - const root = getRootLogger(); - const 
reader = UrlReaders.default({ logger: root, config }); - const discovery = HostDiscovery.fromConfig(config); - const cacheManager = CacheManager.fromConfig(config); - const databaseManager = DatabaseManager.fromConfig(config, { logger: root }); - const tokenManager = ServerTokenManager.noop(); - const taskScheduler = TaskScheduler.fromConfig(config, { databaseManager }); +// core plugins +backend.add(import('@backstage/plugin-app-backend')); +backend.add(import('@backstage/plugin-catalog-backend')); +backend.add(import('@backstage/plugin-proxy-backend')); +backend.add(import('@backstage/plugin-techdocs-backend/alpha')); - const identity = DefaultIdentityClient.create({ - discovery, - }); - const permissions = ServerPermissionClient.fromConfig(config, { - discovery, - tokenManager, - }); +// auth plugins +backend.add(import('@backstage/plugin-auth-backend')); +backend.add(import('@backstage/plugin-auth-backend-module-guest-provider')); - root.info(`Created UrlReader ${reader}`); +// scaffolder plugins +backend.add(import('@backstage/plugin-scaffolder-backend/alpha')); +backend.add( + import('@backstage/plugin-catalog-backend-module-scaffolder-entity-model'), +); +backend.add(import('@backstage/plugin-scaffolder-backend-module-github')); - return (plugin: string): PluginEnvironment => { - const logger = root.child({ type: 'plugin', plugin }); - const database = databaseManager.forPlugin(plugin); - const cache = cacheManager.forPlugin(plugin); - const scheduler = taskScheduler.forPlugin(plugin); - return { - logger, - database, - cache, - config, - reader, - discovery, - tokenManager, - scheduler, - permissions, - identity, - }; - }; -} +// search plugins +backend.add(import('@backstage/plugin-search-backend/alpha')); -async function main() { - const config = await loadBackendConfig({ - argv: process.argv, - logger: getRootLogger(), - }); - const createEnv = makeCreateEnv(config); +backend.add(import('@backstage/plugin-search-backend-module-catalog')); 
+backend.add(import('@backstage/plugin-search-backend-module-techdocs/alpha')); - const catalogEnv = useHotMemoize(module, () => createEnv('catalog')); - const scaffolderEnv = useHotMemoize(module, () => createEnv('scaffolder')); - const authEnv = useHotMemoize(module, () => createEnv('auth')); - const proxyEnv = useHotMemoize(module, () => createEnv('proxy')); - const techdocsEnv = useHotMemoize(module, () => createEnv('techdocs')); - const searchEnv = useHotMemoize(module, () => createEnv('search')); - const appEnv = useHotMemoize(module, () => createEnv('app')); +// other @backstage plugins +backend.add(import('@backstage/plugin-kubernetes-backend')); - const kubernetesEnv = useHotMemoize(module, () => createEnv('kubernetes')); - const argocdEnv = useHotMemoize(module, () => createEnv('argocd')); +// roadie plugins +backend.add(import('@roadiehq/scaffolder-backend-module-utils/new-backend')); +backend.add(import('./plugins/argocd_index')); - const apiRouter = Router(); - apiRouter.use('/catalog', await catalog(catalogEnv)); - apiRouter.use('/scaffolder', await scaffolder(scaffolderEnv)); - apiRouter.use('/auth', await auth(authEnv)); - apiRouter.use('/techdocs', await techdocs(techdocsEnv)); - apiRouter.use('/proxy', await proxy(proxyEnv)); - apiRouter.use('/search', await search(searchEnv)); +backend.add( + import('@roadiehq/scaffolder-backend-module-http-request/new-backend'), +); - apiRouter.use('/kubernetes', await kubernetes(kubernetesEnv)); - apiRouter.use('/argocd', await argocd(argocdEnv)); +// cnoe plugins +backend.add(authModuleKeycloakOIDCProvider); +backend.add(cnoeScaffolderActions); - // Add backends ABOVE this line; this 404 handler is the catch-all fallback - apiRouter.use(notFoundHandler()); - - const service = createServiceBuilder(module) - .loadConfig(config) - .addRouter('/api', apiRouter) - .addRouter('', await app(appEnv)); - - await service.start().catch(err => { - console.log(err); - process.exit(1); - }); -} - -module.hot?.accept(); 
-main().catch(error => { - console.error('Backend failed to start up', error); - process.exit(1); -}); +backend.start(); \ No newline at end of file diff --git a/packages/backend/src/plugins/app.ts b/packages/backend/src/plugins/app.ts deleted file mode 100644 index 7c37f68..0000000 --- a/packages/backend/src/plugins/app.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { createRouter } from '@backstage/plugin-app-backend'; -import { Router } from 'express'; -import { PluginEnvironment } from '../types'; - -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - return await createRouter({ - logger: env.logger, - config: env.config, - database: env.database, - appPackageName: 'app', - }); -} diff --git a/packages/backend/src/plugins/argocd.ts b/packages/backend/src/plugins/argocd.ts index 53fb00d..8bcd3e9 100644 --- a/packages/backend/src/plugins/argocd.ts +++ b/packages/backend/src/plugins/argocd.ts @@ -1,24 +1,45 @@ -import {Config} from "@backstage/config"; -import {createTemplateAction} from "@backstage/plugin-scaffolder-node"; -import {examples} from "./gitea-actions"; -import {Logger} from "winston"; - +import { Config } from '@backstage/config'; +import { createTemplateAction } from '@backstage/plugin-scaffolder-node'; +import { examples } from './gitea-actions'; +import { Logger } from 'winston'; import { ArgoService } from '@roadiehq/backstage-plugin-argo-cd-backend'; - import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend'; -import { PluginEnvironment } from '../types'; +import { loggerToWinstonLogger } from '@backstage/backend-common'; -export default async function createPlugin({ - logger, - config, - }: PluginEnvironment) { - return await createRouter({ logger, config }); -} +import { + coreServices, + createBackendPlugin, +} from '@backstage/backend-plugin-api'; -export function createArgoCDApp(options: { - config: Config; - logger: Logger -}) { +export const argocdPlugin = createBackendPlugin({ + pluginId: 
'argocd', + register(env) { + env.registerInit({ + deps: { + logger: coreServices.logger, + config: coreServices.rootConfig, + reader: coreServices.urlReader, + discovery: coreServices.discovery, + auth: coreServices.auth, + httpRouter: coreServices.httpRouter, + }, + async init({ + logger, + config, + httpRouter, + }) { + httpRouter.use( + await createRouter({ + logger: loggerToWinstonLogger(logger), + config, + }), + ); + }, + }); + }, +}); + +export function createArgoCDApp(options: { config: Config; logger: Logger }) { const { config, logger } = options; return createTemplateAction<{ @@ -28,16 +49,22 @@ export function createArgoCDApp(options: { argoInstance: string; path: string; labelValue?: string; - appNamespace: string + appNamespace: string; }>({ id: 'cnoe:create-argocd-app', - description: - 'creates argocd app', + description: 'creates argocd app', examples, schema: { input: { type: 'object', - required: ['repoUrl', 'projectName', 'appName', 'argoInstance', 'path', 'appNamespace'], + required: [ + 'repoUrl', + 'projectName', + 'appName', + 'argoInstance', + 'path', + 'appNamespace', + ], properties: { repoUrl: { title: 'Repository Location', @@ -66,14 +93,12 @@ export function createArgoCDApp(options: { labelValue: { title: 'for argocd plugin to locate this app', type: 'string', - } + }, }, }, - output: { - }, + output: {}, }, async handler(ctx) { - const { repoUrl, projectName, @@ -81,7 +106,7 @@ export function createArgoCDApp(options: { argoInstance, path, labelValue, - appNamespace + appNamespace, } = ctx.input; const argoUserName = @@ -130,7 +155,7 @@ export function createArgoCDApp(options: { sourceRepo: repoUrl, sourcePath: path, labelValue: labelValue ? 
labelValue : appName, - }) + }); }, }); } diff --git a/packages/backend/src/plugins/argocd_index.ts b/packages/backend/src/plugins/argocd_index.ts new file mode 100644 index 0000000..bd0bc7e --- /dev/null +++ b/packages/backend/src/plugins/argocd_index.ts @@ -0,0 +1 @@ +export { argocdPlugin as default } from './argocd'; \ No newline at end of file diff --git a/packages/backend/src/plugins/auth.ts b/packages/backend/src/plugins/auth.ts index e7287f2..513a509 100644 --- a/packages/backend/src/plugins/auth.ts +++ b/packages/backend/src/plugins/auth.ts @@ -1,57 +1,68 @@ -import { - createRouter, - providers, - defaultAuthProviderFactories, -} from '@backstage/plugin-auth-backend'; -import { Router } from 'express'; -import { PluginEnvironment } from '../types'; import { DEFAULT_NAMESPACE, stringifyEntityRef, } from '@backstage/catalog-model'; import { JsonArray } from '@backstage/types'; +import { createBackendModule } from '@backstage/backend-plugin-api'; +import { + authProvidersExtensionPoint, + createOAuthProviderFactory, + OAuthAuthenticatorResult, +} from '@backstage/plugin-auth-node'; +import { + oidcAuthenticator, + OidcAuthResult, +} from '@backstage/plugin-auth-backend-module-oidc-provider'; -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - const opts = { - logger: env.logger, - config: env.config, - database: env.database, - discovery: env.discovery, - tokenManager: env.tokenManager, - providerFactories: { - ...defaultAuthProviderFactories, - }, - }; - - const envName = env.config - .getOptionalConfig('auth') - ?.getOptionalString('auth'); - if (envName === 'local') { - return await createRouter(opts); - } - - const keycloakAuth = (opts.providerFactories['keycloak-oidc'] = - providers.oidc.create({ - signIn: { - resolver(info, ctx) { - const userRef = stringifyEntityRef({ - kind: 'User', - name: info.result.userinfo.sub, - namespace: DEFAULT_NAMESPACE, - }); - return ctx.issueToken({ - claims: { - sub: userRef, - ent: 
[userRef], - groups: (info.result.userinfo.groups as JsonArray) || [], - }, - }); - }, +export const authModuleKeycloakOIDCProvider = createBackendModule({ + pluginId: 'auth', + moduleId: 'keycloak-oidc', + register(reg) { + reg.registerInit({ + deps: { + providers: authProvidersExtensionPoint, }, - })); - opts.providerFactories['keycloak-oidc'] = keycloakAuth; + async init({ providers }) { + providers.registerProvider({ + providerId: 'keycloak-oidc', + factory: createOAuthProviderFactory({ + authenticator: oidcAuthenticator, + profileTransform: async ( + input: OAuthAuthenticatorResult, + ) => ({ + profile: { + email: input.fullProfile.userinfo.email, + picture: input.fullProfile.userinfo.picture, + displayName: input.fullProfile.userinfo.name, + }, + }), + async signInResolver(info, ctx) { + const { profile } = info; + if (!profile.displayName) { + throw new Error( + 'Login failed, user profile does not contain a valid name', + ); + } + // should use users from catalog + const userRef = stringifyEntityRef({ + kind: 'User', + name: info.profile.displayName!, + namespace: DEFAULT_NAMESPACE, + }); - return await createRouter(opts); -} + return ctx.issueToken({ + claims: { + sub: userRef, + ent: [userRef], + groups: + (info.result.fullProfile.userinfo.groups as JsonArray) || + [], + }, + }); + }, + }), + }); + }, + }); + }, +}); diff --git a/packages/backend/src/plugins/catalog.ts b/packages/backend/src/plugins/catalog.ts deleted file mode 100644 index 4decdca..0000000 --- a/packages/backend/src/plugins/catalog.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { CatalogBuilder } from '@backstage/plugin-catalog-backend'; -import { ScaffolderEntitiesProcessor } from '@backstage/plugin-catalog-backend-module-scaffolder-entity-model'; -import { Router } from 'express'; -import { PluginEnvironment } from '../types'; - -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - const builder = await CatalogBuilder.create(env); - builder.addProcessor(new 
ScaffolderEntitiesProcessor()); - const { processingEngine, router } = await builder.build(); - await processingEngine.start(); - return router; -} diff --git a/packages/backend/src/plugins/gitea-actions.ts b/packages/backend/src/plugins/gitea-actions.ts index 81d7cbb..0780299 100644 --- a/packages/backend/src/plugins/gitea-actions.ts +++ b/packages/backend/src/plugins/gitea-actions.ts @@ -1,16 +1,16 @@ -// this is necessary until https://github.com/backstage/backstage/pull/21890/ is merged and released. import { InputError } from '@backstage/errors'; import { Config } from '@backstage/config'; import { getGiteaRequestOptions, GiteaIntegrationConfig, - ScmIntegrationRegistry, ScmIntegrations, + ScmIntegrationRegistry, + ScmIntegrations, } from '@backstage/integration'; import { createTemplateAction, getRepoSourceDirectory, initRepoAndPush, - TemplateExample + TemplateExample, } from '@backstage/plugin-scaffolder-node'; import crypto from 'crypto'; import yaml from 'yaml'; @@ -290,7 +290,6 @@ const checkGiteaOrg = async ( } }; - const createGiteaProject = async ( config: GiteaIntegrationConfig, options: { @@ -357,8 +356,6 @@ const createGiteaProject = async ( ); } } - - }; const generateCommitMessage = ( @@ -508,7 +505,6 @@ export function createPublishGiteaAction(options: { }, }, async handler(ctx) { - const { repoUrl, description, @@ -555,7 +551,7 @@ export function createPublishGiteaAction(options: { : config.getOptionalString('scaffolder.defaultAuthor.email'), }; // The owner to be used should be either the org name or user authenticated with the gitea server - const repoOwner = owner ? owner: username + const repoOwner = owner ? 
owner : username; const remoteUrl = `${integrationConfig.config.baseUrl}/${repoOwner}/${repo}.git`; const commitResult = await initRepoAndPush({ dir: getRepoSourceDirectory(ctx.workspacePath, sourcePath), @@ -569,7 +565,8 @@ export function createPublishGiteaAction(options: { // Check if the gitea repo URL is available before to exit const operationTimeLimit = 5000; // 20 seconds - const sleep = (ms: number | undefined) => new Promise(r => setTimeout(r, ms)); + const sleep = (ms: number | undefined) => + new Promise(r => setTimeout(r, ms)); await sleep(operationTimeLimit); // await checkAvailabilityGiteaRepository( // integrationConfig.config, { @@ -601,5 +598,3 @@ export function createPublishGiteaAction(options: { }, }); } - - diff --git a/packages/backend/src/plugins/k8s-apply.ts b/packages/backend/src/plugins/k8s-apply.ts new file mode 100644 index 0000000..2e108a2 --- /dev/null +++ b/packages/backend/src/plugins/k8s-apply.ts @@ -0,0 +1,255 @@ +import { + createTemplateAction, + executeShellCommand, +} from '@backstage/plugin-scaffolder-node'; +import { dumpYaml } from '@kubernetes/client-node'; +import yaml from 'js-yaml'; +import { Config } from '@backstage/config'; +import { resolveSafeChildPath } from '@backstage/backend-common'; +import fs from 'fs-extra'; + +interface Cluster { + name: string; + cluster: { + server: string; + 'insecure-skip-tls-verify': boolean; + 'certificate-authority-data'?: string; + 'certificate-authority'?: string; + }; +} +interface Context { + name: string; + context: { + cluster: string; + user: string; + }; +} +interface User { + name: string; + user: { + token?: string; + }; +} +interface ConfFile { + apiVersion: string; + kind: string; + 'current-context': string; + contexts: Context[]; + clusters: Cluster[]; + users: User[]; +} +export const createKubernetesApply = (config: Config) => { + return createTemplateAction<{ + manifestString?: string; + manifestObject?: any; + manifestPath?: string; + namespaced: boolean; + 
clusterName?: string; + }>({ + id: 'cnoe:kubernetes:apply', + schema: { + input: { + type: 'object', + required: ['namespaced'], + properties: { + manifestString: { + type: 'string', + title: 'Manifest', + description: + 'The manifest to apply in the cluster. Must be a string', + }, + manifestObject: { + type: 'object', + title: 'Manifest', + description: + 'The manifest to apply in the cluster. Must be an object', + }, + manifestPath: { + type: 'string', + title: 'Path to the manifest file', + description: 'The path to the manifest file.', + }, + namespaced: { + type: 'boolean', + title: 'Namespaced', + description: 'Whether the API is namespaced or not', + }, + clusterName: { + type: 'string', + title: 'Cluster Name', + description: 'The name of the cluster to apply this', + }, + }, + }, + output: { + type: 'object', + title: 'Returned object', + description: + 'The object returned by Kubernetes by performing this operation', + }, + }, + async handler(ctx) { + let manifestPath = resolveSafeChildPath( + ctx.workspacePath, + 'to-be-applied.yaml', + ); + if (ctx.input.manifestString) { + fs.writeFileSync(manifestPath, ctx.input.manifestString, { + encoding: 'utf8', + mode: '600', + }); + } else if (ctx.input.manifestObject) { + fs.writeFileSync(manifestPath, yaml.dump(ctx.input.manifestObject), { + encoding: 'utf8', + mode: '600', + }); + } else { + const filePath = resolveSafeChildPath( + ctx.workspacePath, + ctx.input.manifestPath!, + ); + manifestPath = filePath; + } + const fileContent = fs.readFileSync(manifestPath, 'utf8'); + const objList: any[] = yaml.loadAll(fileContent); + + if (ctx.input.clusterName) { + // Supports SA token authentication only + const targetCluster = getClusterConfig(ctx.input.clusterName!, config); + const confFile: ConfFile = { + apiVersion: 'v1', + kind: 'Config', + 'current-context': ctx.input.clusterName, + contexts: [ + { + name: ctx.input.clusterName, + context: { + cluster: ctx.input.clusterName, + user: ctx.input.clusterName, + 
}, + }, + ], + clusters: [ + { + name: ctx.input.clusterName, + cluster: { + server: targetCluster.getString('url'), + 'insecure-skip-tls-verify': + !!targetCluster.getOptionalBoolean('skipTLSVerify'), + }, + }, + ], + users: [ + { + name: ctx.input.clusterName, + user: { + token: targetCluster.getString('serviceAccountToken'), + }, + }, + ], + }; + if (!confFile.clusters[0].cluster['insecure-skip-tls-verify']) { + let caDataRaw = targetCluster.getOptionalString('caData'); + if (caDataRaw?.startsWith('-----BEGIN CERTIFICATE-----')) { + caDataRaw = Buffer.from( + targetCluster.getString('caData'), + 'utf8', + ).toString('base64'); + } + confFile.clusters[0].cluster['certificate-authority-data'] = + caDataRaw; + if ( + targetCluster.getOptionalString('caFile') && + !( + targetCluster.getOptionalString('caFile')?.length === 0 || + targetCluster.getOptionalString('caFile') === null + ) + ) { + confFile.clusters[0].cluster['certificate-authority'] = + targetCluster.getString('caFile'); + } + } + + const confString = dumpYaml(confFile); + const confFilePath = resolveSafeChildPath(ctx.workspacePath, 'config'); + fs.writeFileSync(confFilePath, confString, { + encoding: 'utf8', + mode: '600', + }); + await executeShellCommand({ + command: 'cat', + args: [confFilePath], + logStream: ctx.logStream, + }); + await executeShellCommand({ + command: 'cat', + args: [manifestPath], + logStream: ctx.logStream, + }); + let counter = 1; + for (const obj of objList) { + let manifestFilePath = resolveSafeChildPath( + ctx.workspacePath, + 'to-be-applied-' + counter.toString() + '.yaml', + ); + fs.writeFileSync(manifestFilePath, yaml.dump(obj), { + encoding: 'utf8', + mode: '600', + }); + if (obj.metadata.generateName !== undefined) { + await executeShellCommand({ + command: 'kubectl', + args: [ + '--kubeconfig', + confFilePath, + 'create', + '-f', + manifestFilePath, + ], + logStream: ctx.logStream, + }); + } else { + await executeShellCommand({ + command: 'kubectl', + args: [ + 
'--kubeconfig', + confFilePath, + 'apply', + '-f', + manifestFilePath, + ], + logStream: ctx.logStream, + }); + } + counter += 1; + } + return; + } + throw new Error('please specify a valid cluster name'); + }, + }); +}; + +// Finds the first cluster that matches the given name. +function getClusterConfig(name: string, config: Config): Config { + const clusterConfigs = config + .getConfigArray('kubernetes.clusterLocatorMethods') + .filter((val: Config) => { + return val.getString('type') === 'config'; + }); + + const clusters = new Array(); + clusterConfigs.filter((conf: Config) => { + const cluster = conf.getConfigArray('clusters').find((val: Config) => { + return val.getString('name') === name; + }); + if (cluster) { + clusters.push(cluster); + } + }); + + if (clusters.length === 0) { + throw new Error(`Cluster with name ${name} not found`); + } + return clusters[0]; +} \ No newline at end of file diff --git a/packages/backend/src/plugins/kubernetes.ts b/packages/backend/src/plugins/kubernetes.ts deleted file mode 100644 index 32c1c12..0000000 --- a/packages/backend/src/plugins/kubernetes.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { KubernetesBuilder } from '@backstage/plugin-kubernetes-backend'; -import { Router } from 'express'; -import { PluginEnvironment } from '../types'; -import { CatalogClient } from '@backstage/catalog-client'; - -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - const catalogApi = new CatalogClient({ discoveryApi: env.discovery }); - const { router } = await KubernetesBuilder.createBuilder({ - logger: env.logger, - config: env.config, - catalogApi, - permissions: env.permissions, - }).build(); - - return router; -} diff --git a/packages/backend/src/plugins/proxy.ts b/packages/backend/src/plugins/proxy.ts index 54ec393..f61619d 100644 --- a/packages/backend/src/plugins/proxy.ts +++ b/packages/backend/src/plugins/proxy.ts @@ -1,4 +1,4 @@ -import { createRouter } from '@backstage/plugin-proxy-backend'; 
+import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend'; import { Router } from 'express'; import { PluginEnvironment } from '../types'; @@ -7,7 +7,6 @@ export default async function createPlugin( ): Promise { return await createRouter({ logger: env.logger, - config: env.config, - discovery: env.discovery, + config: env.config }); } diff --git a/packages/backend/src/plugins/sanitize.ts b/packages/backend/src/plugins/sanitize.ts new file mode 100644 index 0000000..cb13f13 --- /dev/null +++ b/packages/backend/src/plugins/sanitize.ts @@ -0,0 +1,68 @@ +import { createTemplateAction } from '@backstage/plugin-scaffolder-node'; +import yaml from 'js-yaml'; + +// Add type annotations to fix TS2742 +type SanitizeResourceInput = { + document: string; +}; + +type SanitizeResourceOutput = { + sanitized: string; +}; + +export const createSanitizeResource = () => { + return createTemplateAction({ + id: 'cnoe:utils:sanitize', + schema: { + input: { + type: 'object', + required: ['document'], + properties: { + document: { + type: 'string', + title: 'Document', + description: 'The document to be sanitized', + }, + }, + }, + output: { + type: 'object', + properties: { + sanitized: { + type: 'string', + description: 'The sanitized yaml string', + }, + }, + }, + }, + async handler(ctx) { + const obj = yaml.load(ctx.input.document); + ctx.output('sanitized', yaml.dump(removeEmptyObjects(obj))); + }, + }); +}; + +// Remove empty elements from an object +function removeEmptyObjects(obj: any): any { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + const newObj: any = Array.isArray(obj) ? 
[] : {}; + + for (const key in obj) { + const value = obj[key]; + const newValue = removeEmptyObjects(value); + if ( + !( + newValue === null || + newValue === undefined || + (typeof newValue === 'object' && Object.keys(newValue).length === 0) + ) + ) { + newObj[key] = newValue; + } + } + + return newObj; +} diff --git a/packages/backend/src/plugins/scaffolder.ts b/packages/backend/src/plugins/scaffolder.ts index fb9e0d6..0b6fb2f 100644 --- a/packages/backend/src/plugins/scaffolder.ts +++ b/packages/backend/src/plugins/scaffolder.ts @@ -1,95 +1,44 @@ -import { CatalogClient } from '@backstage/catalog-client'; -import {createBuiltinActions, createRouter} from '@backstage/plugin-scaffolder-backend'; -import { Router } from 'express'; -import type { PluginEnvironment } from '../types'; import { ScmIntegrations } from '@backstage/integration'; -import {createPublishGiteaAction} from "./gitea-actions"; -import {createArgoCDApp} from "./argocd"; +import { createPublishGiteaAction } from './gitea-actions'; import { - createZipAction, - createSleepAction, - createWriteFileAction, - createAppendFileAction, - createMergeJSONAction, - createMergeAction, - createParseFileAction, - createSerializeYamlAction, - createSerializeJsonAction, - createJSONataAction, - createYamlJSONataTransformAction, - createJsonJSONataTransformAction, - createReplaceInFileAction -} from '@roadiehq/scaffolder-backend-module-utils'; + coreServices, + createBackendModule, +} from '@backstage/backend-plugin-api'; +import { scaffolderActionsExtensionPoint } from '@backstage/plugin-scaffolder-node/alpha'; +import { createArgoCDApp } from './argocd'; +import { getRootLogger } from '@backstage/backend-common'; +import { createKubernetesApply } from './k8s-apply'; +import { createSanitizeResource } from './sanitize'; +import { createVerifyDependency } from './verify'; -import { - createSanitizeResource, - createVerifyDependency, - createKubernetesApply, -} from '@jessesanford/plugin-scaffolder-actions'; 
+export const cnoeScaffolderActions = createBackendModule({ + pluginId: 'scaffolder', + moduleId: 'cnoe-actions', + register(env) { + env.registerInit({ + deps: { + scaffolder: scaffolderActionsExtensionPoint, + config: coreServices.rootConfig, + }, + async init({ scaffolder, config }) { + const integrations = ScmIntegrations.fromConfig(config); + const logger = getRootLogger(); -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - const catalogClient = new CatalogClient({ - discoveryApi: env.discovery, - }); - - const integrations = ScmIntegrations.fromConfig(env.config); - const builtInActions = createBuiltinActions({ - integrations, - catalogClient, - config: env.config, - reader: env.reader, - }); - - const options = { - integrations: integrations, - config: env.config, - } - const argocdOptions = { - config: env.config, - logger: env.logger - } - - const cnoeActions = [ - createPublishGiteaAction(options), - createArgoCDApp(argocdOptions), - createSanitizeResource(), - createVerifyDependency(), - createKubernetesApply(env.config) - ] - - const roadieUtilActions = [ - createZipAction(), - createSleepAction(), - createWriteFileAction(), - createAppendFileAction(), - createMergeJSONAction({}), - createMergeAction(), - createParseFileAction(), - createSerializeYamlAction(), - createSerializeJsonAction(), - createJSONataAction(), - createYamlJSONataTransformAction(), - createJsonJSONataTransformAction(), - createReplaceInFileAction() - ] - - const actions = [ - ...builtInActions, - ...cnoeActions, - ...roadieUtilActions - ]; - - return await createRouter({ - actions: actions, - logger: env.logger, - config: env.config, - database: env.database, - reader: env.reader, - catalogClient, - identity: env.identity, - permissions: env.permissions, - }); -} + scaffolder.addActions( + createPublishGiteaAction({ + integrations, + config, + }), + createArgoCDApp({ + config, + logger, + }), + createKubernetesApply(config), + 
createSanitizeResource(), + createVerifyDependency(), + ); + }, + }); + }, +}); diff --git a/packages/backend/src/plugins/search.ts b/packages/backend/src/plugins/search.ts deleted file mode 100644 index 467ac60..0000000 --- a/packages/backend/src/plugins/search.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { useHotCleanup } from '@backstage/backend-common'; -import { createRouter } from '@backstage/plugin-search-backend'; -import { - IndexBuilder, - LunrSearchEngine, -} from '@backstage/plugin-search-backend-node'; -import { PluginEnvironment } from '../types'; -import { DefaultCatalogCollatorFactory } from '@backstage/plugin-search-backend-module-catalog'; -import { DefaultTechDocsCollatorFactory } from '@backstage/plugin-search-backend-module-techdocs'; -import { Router } from 'express'; - -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - // Initialize a connection to a search engine. - const searchEngine = new LunrSearchEngine({ - logger: env.logger, - }); - const indexBuilder = new IndexBuilder({ - logger: env.logger, - searchEngine, - }); - - const schedule = env.scheduler.createScheduledTaskRunner({ - frequency: { minutes: 10 }, - timeout: { minutes: 15 }, - // A 3 second delay gives the backend server a chance to initialize before - // any collators are executed, which may attempt requests against the API. - initialDelay: { seconds: 3 }, - }); - - // Collators are responsible for gathering documents known to plugins. This - // collator gathers entities from the software catalog. - indexBuilder.addCollator({ - schedule, - factory: DefaultCatalogCollatorFactory.fromConfig(env.config, { - discovery: env.discovery, - tokenManager: env.tokenManager, - }), - }); - - // collator gathers entities from techdocs. 
- indexBuilder.addCollator({ - schedule, - factory: DefaultTechDocsCollatorFactory.fromConfig(env.config, { - discovery: env.discovery, - logger: env.logger, - tokenManager: env.tokenManager, - }), - }); - - // The scheduler controls when documents are gathered from collators and sent - // to the search engine for indexing. - const { scheduler } = await indexBuilder.build(); - scheduler.start(); - - useHotCleanup(module, () => scheduler.stop()); - - return await createRouter({ - engine: indexBuilder.getSearchEngine(), - types: indexBuilder.getDocumentTypes(), - permissions: env.permissions, - config: env.config, - logger: env.logger, - }); -} diff --git a/packages/backend/src/plugins/techdocs.ts b/packages/backend/src/plugins/techdocs.ts deleted file mode 100644 index be8bb0c..0000000 --- a/packages/backend/src/plugins/techdocs.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { DockerContainerRunner } from '@backstage/backend-common'; -import { - createRouter, - Generators, - Preparers, - Publisher, -} from '@backstage/plugin-techdocs-backend'; -import Docker from 'dockerode'; -import { Router } from 'express'; -import { PluginEnvironment } from '../types'; - -export default async function createPlugin( - env: PluginEnvironment, -): Promise { - // Preparers are responsible for fetching source files for documentation. - const preparers = await Preparers.fromConfig(env.config, { - logger: env.logger, - reader: env.reader, - }); - - // Docker client (conditionally) used by the generators, based on techdocs.generators config. - const dockerClient = new Docker(); - const containerRunner = new DockerContainerRunner({ dockerClient }); - - // Generators are used for generating documentation sites. - const generators = await Generators.fromConfig(env.config, { - logger: env.logger, - containerRunner, - }); - - // Publisher is used for - // 1. Publishing generated files to storage - // 2. Fetching files from storage and passing them to TechDocs frontend. 
- const publisher = await Publisher.fromConfig(env.config, { - logger: env.logger, - discovery: env.discovery, - }); - - // checks if the publisher is working and logs the result - await publisher.getReadiness(); - - return await createRouter({ - preparers, - generators, - publisher, - logger: env.logger, - config: env.config, - discovery: env.discovery, - cache: env.cache, - }); -} diff --git a/packages/backend/src/plugins/verify.ts b/packages/backend/src/plugins/verify.ts new file mode 100644 index 0000000..ae7f56b --- /dev/null +++ b/packages/backend/src/plugins/verify.ts @@ -0,0 +1,69 @@ +import { executeShellCommand } from '@backstage/plugin-scaffolder-node'; +import { createTemplateAction } from '@backstage/plugin-scaffolder-node'; +import { Writable } from 'stream'; + +class ConsoleLogStream extends Writable { + data: string; + + constructor(options: any) { + super(options); + this.data = ''; + } + + _write(chunk: any, _: any, callback: any) { + this.data += chunk.toString(); // Convert the chunk to a string and append it to this.data + console.log(this.data); + callback(); + } +} + +export const createVerifyDependency = () => { + return createTemplateAction<{ + verifiers: string[]; + }>({ + id: 'cnoe:verify:dependency', + schema: { + input: { + type: 'object', + required: ['verifiers'], + properties: { + verifiers: { + type: 'array', + items: { + type: 'string', + }, + title: 'verifiers', + description: 'The list of verifiers', + }, + }, + }, + }, + async handler(ctx) { + const verifiers = ctx.input.verifiers; + + if (verifiers === null || verifiers.length === 0) { + ctx.logger.error('no verifier was supplied for the object'); + return; + } + + const baseCommand = 'cnoe'; + const baseArguments = ['k8s', 'verify']; + + verifiers.forEach((verifier: string) => + baseArguments.push('--config', verifier), + ); + + const logStream = new ConsoleLogStream({}); + await executeShellCommand({ + command: baseCommand, + args: baseArguments, + logStream: logStream, + }) 
+ .then(() => ctx.logger.info('verification succeeded')) + .catch(error => { + ctx.logger.error(error); + throw new Error(logStream.data); + }); + }, + }); +}; diff --git a/packages/backend/src/types.ts b/packages/backend/src/types.ts index 9cd2c74..0dad120 100644 --- a/packages/backend/src/types.ts +++ b/packages/backend/src/types.ts @@ -5,9 +5,8 @@ import { PluginDatabaseManager, PluginEndpointDiscovery, TokenManager, - UrlReader, -} from '@backstage/backend-common'; -import { PluginTaskScheduler } from '@backstage/backend-tasks'; +} from '@backstage/backend-common/dist'; //TODO: deprecated +import { PluginTaskScheduler } from '@backstage/backend-tasks/dist'; import { PermissionEvaluator } from '@backstage/plugin-permission-common'; import { IdentityApi } from '@backstage/plugin-auth-node'; @@ -16,7 +15,6 @@ export type PluginEnvironment = { database: PluginDatabaseManager; cache: PluginCacheManager; config: Config; - reader: UrlReader; discovery: PluginEndpointDiscovery; tokenManager: TokenManager; scheduler: PluginTaskScheduler; diff --git a/plugins/apache-spark/.eslintrc.js b/plugins/apache-spark/.eslintrc.js deleted file mode 100644 index e2a53a6..0000000 --- a/plugins/apache-spark/.eslintrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('@backstage/cli/config/eslint-factory')(__dirname); diff --git a/plugins/apache-spark/README.md b/plugins/apache-spark/README.md deleted file mode 100644 index 976aba2..0000000 --- a/plugins/apache-spark/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# apache-spark - -Welcome to the apache-spark plugin! - -_This plugin was created through the Backstage CLI_ - -## Getting started - -Your plugin has been added to the example app in this repository, meaning you'll be able to access it by running `yarn start` in the root directory, and then navigating to [/apache-spark](http://localhost:3000/apache-spark). - -You can also serve the plugin in isolation by running `yarn start` in the plugin directory. 
-This method of serving the plugin provides quicker iteration speed and a faster startup and hot reloads. -It is only meant for local development, and the setup for it can be found inside the [/dev](./dev) directory. diff --git a/plugins/apache-spark/dev/index.tsx b/plugins/apache-spark/dev/index.tsx deleted file mode 100644 index 5f2b474..0000000 --- a/plugins/apache-spark/dev/index.tsx +++ /dev/null @@ -1,12 +0,0 @@ -import React from 'react'; -import { createDevApp } from '@backstage/dev-utils'; -import { apacheSparkPlugin, ApacheSparkPage } from '../src/plugin'; - -createDevApp() - .registerPlugin(apacheSparkPlugin) - .addPage({ - element: , - title: 'Root Page', - path: '/apache-spark' - }) - .render(); diff --git a/plugins/apache-spark/package.json b/plugins/apache-spark/package.json deleted file mode 100644 index 7553c7f..0000000 --- a/plugins/apache-spark/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "@internal/plugin-apache-spark", - "version": "0.1.0", - "main": "src/index.ts", - "types": "src/index.ts", - "license": "Apache-2.0", - "private": true, - "publishConfig": { - "access": "public", - "main": "dist/index.esm.js", - "types": "dist/index.d.ts" - }, - "backstage": { - "role": "frontend-plugin" - }, - "sideEffects": false, - "scripts": { - "start": "backstage-cli package start", - "build": "backstage-cli package build", - "lint": "backstage-cli package lint", - "test": "backstage-cli package test", - "clean": "backstage-cli package clean", - "prepack": "backstage-cli package prepack", - "postpack": "backstage-cli package postpack" - }, - "dependencies": { - "@backstage/core-components": "^0.13.8", - "@backstage/core-plugin-api": "^1.8.2", - "@backstage/theme": "^0.5.0", - "@material-ui/core": "^4.9.13", - "@material-ui/icons": "^4.9.1", - "@material-ui/lab": "^4.0.0-alpha.61", - "react-use": "^17.2.4" - }, - "peerDependencies": { - "react": "^16.13.1 || ^17.0.0" - }, - "devDependencies": { - "@backstage/cli": "^0.25.1", - 
"@backstage/core-app-api": "^1.11.3", - "@backstage/dev-utils": "^1.0.26", - "@backstage/test-utils": "^1.4.7", - "@testing-library/jest-dom": "^5.10.1", - "@testing-library/react": "^12.1.3", - "@testing-library/user-event": "^14.0.0", - "msw": "^1.0.0" - }, - "files": [ - "dist" - ] -} diff --git a/plugins/apache-spark/src/api/index.test.ts b/plugins/apache-spark/src/api/index.test.ts deleted file mode 100644 index 20f775b..0000000 --- a/plugins/apache-spark/src/api/index.test.ts +++ /dev/null @@ -1,113 +0,0 @@ -// import { ApacheSparkClient } from './index'; -// import { ApacheSpark } from './model'; -// -// const mockKubernetesApi = { -// proxy: jest.fn(), -// getClusters: jest.fn(), -// getObjectsByEntity: jest.fn(), -// getWorkloadsByEntity: jest.fn(), -// getCustomObjectsByEntity: jest.fn(), -// }; -// -// describe('ApacheSparkClient', () => { -// let apacheSparkClient: ApacheSparkClient; -// -// beforeEach(() => { -// apacheSparkClient = new ApacheSparkClient(mockKubernetesApi); -// }); -// -// afterEach(() => { -// jest.clearAllMocks(); -// }); -// -// it('should fetch Spark application logs', async () => { -// mockKubernetesApi.proxy.mockResolvedValue({ -// ok: true, -// text: () => { -// return 'logs'; -// }, -// }); -// const logs = await apacheSparkClient.getLogs( -// 'cluster1', -// 'spark-namespace', -// 'spark-pod-name', -// 'abc', -// ); -// expect(logs).toEqual('logs'); -// expect(mockKubernetesApi.proxy).toHaveBeenCalledWith({ -// clusterName: 'cluster1', -// path: '/api/v1/namespaces/spark-namespace/pods/spark-pod-name/log?tailLines=1000&container=abc', -// }); -// }); -// -// it('should throw error if Spark application logs are not fetched', async () => { -// mockKubernetesApi.proxy.mockResolvedValueOnce({ -// status: 500, -// statusText: 'Internal Server Error', -// ok: false, -// text: () => { -// return 'oh noes'; -// }, -// }); -// -// await expect( -// apacheSparkClient.getLogs( -// 'spark-app-name', -// 'spark-namespace', -// 
'spark-pod-name', -// 'abc', -// ), -// ).rejects.toEqual( -// 'failed to fetch logs: 500, Internal Server Error, oh noes', -// ); -// }); -// -// // test getSparkApp method -// it('should fetch Spark application', async () => { -// // @ts-ignore -// const mockResponse: ApacheSpark = { -// apiVersion: 'sparkoperator.k8s.io/v1beta2', -// kind: 'SparkApplication', -// metadata: { -// name: 'spark-app-name', -// namespace: 'spark-namespace', -// labels: { -// app: 'spark-app-name', -// }, -// creationTimestamp: '2021-01-01T00:00:00Z', -// }, -// spec: { -// image: 'abc', -// mainApplicationFile: 'main.py', -// mode: 'cluster', -// sparkVersion: 'v3.1.1.', -// type: 'Python', -// driver: { -// cores: 1, -// }, -// executor: { -// cores: 1, -// }, -// }, -// status: { -// applicationState: { -// state: 'RUNNING', -// }, -// }, -// }; -// -// mockKubernetesApi.proxy.mockResolvedValue({ -// ok: true, -// text: () => { -// return JSON.stringify(mockResponse); -// }, -// }); -// -// const application = await apacheSparkClient.getSparkApp( -// 'spark-app-name', -// 'spark-namespace', -// 'abc', -// ); -// expect(application).toEqual(mockResponse); -// }); -// }); diff --git a/plugins/apache-spark/src/api/index.ts b/plugins/apache-spark/src/api/index.ts deleted file mode 100644 index cda9454..0000000 --- a/plugins/apache-spark/src/api/index.ts +++ /dev/null @@ -1,176 +0,0 @@ -import { createApiRef } from '@backstage/core-plugin-api'; -import { ApacheSpark, ApacheSparkList, Pod } from './model'; -import { KubernetesApi } from '@backstage/plugin-kubernetes'; - -export const apacheSparkApiRef = createApiRef({ - id: 'plugin.apachespark', -}); - -const API_VERSION = 'sparkoperator.k8s.io/v1beta2'; -const SPARK_APP_PLURAL = 'sparkapplications'; -const K8s_API_TIMEOUT = 'timeoutSeconds'; - -export interface ApacheSparkApi { - getSparkApps( - clusterName: string | undefined, - namespace: string | undefined, - labels: string | undefined, - ): Promise; - - getSparkApp( - clusterName: 
string | undefined, - namespace: string | undefined, - name: string, - ): Promise; - - getLogs( - clusterName: string | undefined, - namespace: string | undefined, - podName: string, - containerName?: string | undefined, - tailLine?: number, - ): Promise; - - getContainers( - clusterName: string | undefined, - namespace: string | undefined, - podName: string, - ): Promise; -} - -export class ApacheSparkClient implements ApacheSparkApi { - private kubernetesApi: KubernetesApi; - constructor(kubernetesApi: KubernetesApi) { - this.kubernetesApi = kubernetesApi; - } - async getSparkApps( - clusterName: string | undefined, - namespace: string | undefined, - labels: string | undefined, - ): Promise { - const ns = namespace !== undefined ? namespace : 'default'; - const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}`; - const query = new URLSearchParams({ - [K8s_API_TIMEOUT]: '30', - }); - if (labels) { - query.set('labelSelector', labels); - } - const resp = await this.kubernetesApi.proxy({ - clusterName: - clusterName !== undefined ? clusterName : await this.getFirstCluster(), - path: `${path}?${query.toString()}`, - }); - - if (!resp.ok) { - return Promise.reject( - `failed to fetch resources: ${resp.status}, ${ - resp.statusText - }, ${await resp.text()}`, - ); - } - const out = JSON.parse(await resp.text()); - this.removeManagedField(out); - return out; - } - - async getSparkApp( - clusterName: string | undefined, - namespace: string | undefined, - name: string, - ): Promise { - const ns = namespace !== undefined ? namespace : 'default'; - const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}/${name}`; - const resp = await this.kubernetesApi.proxy({ - clusterName: - clusterName !== undefined ? 
clusterName : await this.getFirstCluster(), - path: `${path}`, - }); - if (!resp.ok) { - return Promise.reject( - `failed to fetch resources: ${resp.status}, ${ - resp.statusText - }, ${await resp.text()}`, - ); - } - const out = JSON.parse(await resp.text()); - this.removeManagedField(out); - return out; - } - - async getLogs( - clusterName: string | undefined, - namespace: string | undefined, - podName: string, - containerName: string | undefined, - tailLine: number = 1000, - ): Promise { - const ns = namespace !== undefined ? namespace : 'default'; - const path = `/api/v1/namespaces/${ns}/pods/${podName}/log`; - const query = new URLSearchParams({ - tailLines: tailLine.toString(), - }); - if (containerName) { - query.set('container', containerName); - } - - const resp = await this.kubernetesApi.proxy({ - clusterName: - clusterName !== undefined ? clusterName : await this.getFirstCluster(), - path: `${path}?${query.toString()}`, - }); - if (!resp.ok) { - return Promise.reject( - `failed to fetch logs: ${resp.status}, ${ - resp.statusText - }, ${await resp.text()}`, - ); - } - return resp.text(); - } - - async getContainers( - clusterName: string | undefined, - namespace: string | undefined, - podName: string, - ): Promise { - const ns = namespace !== undefined ? namespace : 'default'; - const path = `/api/v1/namespaces/${ns}/pods/${podName}`; - const query = new URLSearchParams({ - [K8s_API_TIMEOUT]: '30', - }); - const resp = await this.kubernetesApi.proxy({ - clusterName: - clusterName !== undefined ? 
clusterName : await this.getFirstCluster(), - path: `${path}?${query.toString()}`, - }); - if (!resp.ok) { - throw new Error( - `failed to fetch logs: ${resp.status}, ${ - resp.statusText - }, ${await resp.text()}`, - ); - } - const pod = JSON.parse(await resp.text()) as Pod; - return pod.spec.containers.map(c => c.name); - } - - async getFirstCluster(): Promise { - const clusters = await this.kubernetesApi.getClusters(); - if (clusters.length > 0) { - return Promise.resolve(clusters[0].name); - } - return Promise.reject('no clusters found in configuration'); - } - - removeManagedField(spark: any) { - if (spark.metadata?.hasOwnProperty('managedFields')) { - delete spark.metadata.managedFields; - } - if (spark.items) { - for (const i of spark.items) { - this.removeManagedField(i); - } - } - } -} diff --git a/plugins/apache-spark/src/api/model.ts b/plugins/apache-spark/src/api/model.ts deleted file mode 100644 index 1d6455c..0000000 --- a/plugins/apache-spark/src/api/model.ts +++ /dev/null @@ -1,100 +0,0 @@ -export type Metadata = { - name: string; - namespace?: string; - labels?: Record; - annotations?: Record; - creationTimestamp: string; - managedFields?: any; -}; - -export type Spec = { - arguments?: string[]; - batchScheduler?: string; - driver: { - coreLimit?: string; - coreRequest?: string; - cores?: number; - gpu?: { - name: string; - quantity: number; - }; - labels?: Record; - memory?: string; - memoryOverhead?: string; - podName?: string; - schedulerName?: string; - serviceAccount?: string; - }; - executor: { - coreLimit?: string; - coreRequest?: string; - cores?: number; - gpu?: { - name: string; - quantity: number; - }; - instances?: number; - labels?: Record; - memory?: string; - memoryOverhead?: string; - schedulerName?: string; - serviceAccount?: string; - }; - image: string; - mainClass?: string; - mainApplicationFile?: string; - mode: string; - pythonVersion?: string; - sparkVersion: string; - type: string; -}; - -export type Status = { - 
applicationState: { - errorMessage?: string; - state: string; - }; - driverInfo?: { - podName: string; - webUIAddress: string; - webUIIngressAddress: string; - webUIIngressName: string; - webUIPort: string; - webUIServiceName: string; - }; - executionAttempts?: number; - executorState?: { [key: string]: string }; - lastSubmissionAttemptTime?: string; - sparkApplicationId?: string; - submissionAttempts?: number; - submissionID?: string; - terminationTime?: string; -}; - -export type ApacheSpark = { - apiVersion: string; - kind: string; - metadata: Metadata; - spec: Spec; - status: Status; -}; - -export type ApacheSparkList = { - apiVersion: string; - kind: string; - items?: ApacheSpark[]; -}; - -export type Pod = { - apiVersion: string; - kind: string; - metadata: Metadata; - spec: PodSpec; -}; - -export type PodSpec = { - containers: { - image: string; - name: string; - }[]; -}; diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx deleted file mode 100644 index 01d3ade..0000000 --- a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import React from 'react'; -import { render, screen } from '@testing-library/react'; -import { useApi } from '@backstage/core-plugin-api'; -import { useEntity } from '@backstage/plugin-catalog-react'; -import useAsync from 'react-use/lib/useAsync'; -import { ApacheSpark } from '../../api/model'; -import { ApacheSparkDriverLogs } from './ApacheSparkLogs'; -import { - APACHE_SPARK_LABEL_SELECTOR_ANNOTATION, - CLUSTER_NAME_ANNOTATION, - K8S_NAMESPACE_ANNOTATION, -} from '../../consts'; - -jest.mock('@backstage/core-plugin-api'); -jest.mock('react-use/lib/useAsync'); -jest.mock('@backstage/plugin-catalog-react'); - -jest.mock('@backstage/core-components', () => ({ - LogViewer: (props: { text: string }) => { - return
{props.text}
; - }, -})); - -describe('ApacheSparkDriverLogs', () => { - const mockUseApi = useApi as jest.MockedFunction; - const mockUseAsync = useAsync as jest.MockedFunction; - const mockUseEntity = useEntity as jest.MockedFunction; - const mockGetLogs = jest.fn(); - const mockSparkApp = { - status: { - driverInfo: { - podName: 'test-pod', - }, - }, - } as ApacheSpark; - - beforeEach(() => { - mockUseApi.mockReturnValue({ - getLogs: mockGetLogs, - }); - mockUseEntity.mockReturnValue({ - entity: { - apiVersion: 'version', - kind: 'kind', - metadata: { - name: 'name', - namespace: 'ns1', - annotations: { - [K8S_NAMESPACE_ANNOTATION]: 'k8s-ns', - [CLUSTER_NAME_ANNOTATION]: 'my-cluster', - [APACHE_SPARK_LABEL_SELECTOR_ANNOTATION]: 'env=test', - }, - }, - }, - }); - }); - - afterEach(() => { - jest.clearAllMocks(); - }); - - it('should render error message if there is an error', () => { - mockUseAsync.mockReturnValue({ - value: undefined, - loading: false, - error: new Error('Test error'), - }); - - render(); - expect(screen.getByText('Error: Test error')).toBeInTheDocument(); - expect(screen.getByRole('alert')).toBeInTheDocument(); - }); - - it('should render the log viewer with the fetched logs', async () => { - mockUseAsync.mockReturnValue({ - value: 'test logs', - loading: false, - error: undefined, - }); - render(); - expect(screen.getByText('test logs')).toBeInTheDocument(); - }); -}); diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx deleted file mode 100644 index e892856..0000000 --- a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx +++ /dev/null @@ -1,100 +0,0 @@ -import { useApi } from '@backstage/core-plugin-api'; -import { apacheSparkApiRef } from '../../api'; -import useAsync from 'react-use/lib/useAsync'; -import { ApacheSpark } from '../../api/model'; -import { - LogViewer, - Progress, - Select, - SelectedItems, - SelectItem, -} from 
'@backstage/core-components'; -import Alert from '@material-ui/lab/Alert'; -import React, { useEffect, useState } from 'react'; -import { useEntity } from '@backstage/plugin-catalog-react'; -import { getAnnotationValues } from '../utils'; - -export const ApacheSparkDriverLogs = (props: { sparkApp: ApacheSpark }) => { - const apiClient = useApi(apacheSparkApiRef); - const { entity } = useEntity(); - const { ns, clusterName } = getAnnotationValues(entity); - - const { value, loading, error } = useAsync(async (): Promise => { - return await apiClient.getLogs( - clusterName, - ns, - props.sparkApp.status.driverInfo?.podName!, - 'spark-kubernetes-driver', - ); - }, [props]); - if (loading) { - return ; - } else if (error) { - return {`${error}`}; - } - return ; -}; - -const ExecutorLogs = (props: { name: string }) => { - const apiClient = useApi(apacheSparkApiRef); - const { entity } = useEntity(); - const [logs, setLogs] = useState(''); - const { ns, clusterName } = getAnnotationValues(entity); - - useEffect(() => { - async function getLogs() { - try { - const val = await apiClient.getLogs( - clusterName, - ns, - props.name, - 'spark-kubernetes-executor', - ); - setLogs(val); - } catch (e) { - if (typeof e === 'string') { - setLogs(e); - } - } - } - if (props.name !== '') { - getLogs(); - } - }, [apiClient, clusterName, ns, props]); - - return ; -}; - -export const ApacheSparkExecutorLogs = (props: { sparkApp: ApacheSpark }) => { - const [selected, setSelected] = useState(''); - if (props.sparkApp.status.applicationState.state !== 'RUNNING') { - return ( - - Executor logs are only available for Spark Applications in RUNNING state - - ); - } - const executors: SelectItem[] = [{ label: '', value: '' }]; - for (const key in props.sparkApp.status.executorState) { - if (props.sparkApp.status.executorState.hasOwnProperty(key)) { - executors.push({ label: key, value: key }); - } - } - - const handleChange = (item: SelectedItems) => { - if (typeof item === 'string' && item !== 
'') { - setSelected(item); - } - }; - return ( - <> -